Jan 04 11:48:03 crc systemd[1]: Starting Kubernetes Kubelet...
Jan 04 11:48:03 crc restorecon[4744]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Jan 04 11:48:03 crc restorecon[4744]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 04 11:48:03 crc 
restorecon[4744]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 04 11:48:03 crc 
restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc 
restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc 
restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:03 
crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 
11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 04 11:48:03 crc restorecon[4744]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:03 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 
11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc 
restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:04 crc restorecon[4744]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 04 11:48:04 crc restorecon[4744]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 04 11:48:04 crc kubenswrapper[5003]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 04 11:48:04 crc kubenswrapper[5003]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 04 11:48:04 crc kubenswrapper[5003]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 04 11:48:04 crc kubenswrapper[5003]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Jan 04 11:48:04 crc kubenswrapper[5003]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jan 04 11:48:04 crc kubenswrapper[5003]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.660915 5003 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664629 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664667 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664674 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664678 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664684 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664689 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664693 5003 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664697 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664701 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664707 5003 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664712 5003 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664717 5003 feature_gate.go:330] unrecognized feature gate: Example Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664722 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664728 5003 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664733 5003 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664738 5003 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664743 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664752 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664756 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664759 5003 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664763 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 04 
11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664768 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664773 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664776 5003 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664780 5003 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664784 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664789 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664794 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664799 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664803 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664807 5003 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664811 5003 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664815 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664819 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664823 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664828 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664832 5003 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664836 5003 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664840 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664845 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664850 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664854 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664857 5003 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664867 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664871 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664875 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664879 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664883 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664887 5003 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664891 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664894 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664898 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664901 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664911 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
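The wall of feature_gate.go:330 warnings above is the kubelet checking each configured gate against the set it was compiled with: OpenShift-specific gates such as OnClusterBuild or GatewayAPI are simply unknown to a vanilla kubelet, while known-but-locked gates (CloudDualStackNodeIPs, ValidatingAdmissionPolicy, KMSv1) get the "Setting GA/deprecated feature gate" variant instead. A simplified, self-contained sketch of that check — illustrative only, not the real k8s.io/component-base/featuregate code:

// featuregates.go — simplified sketch of the gate check behind the warnings;
// the gate names in `known` and the behavior are illustrative assumptions.
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// known maps gate name -> whether it is locked (GA/deprecated), in which case
// setting it explicitly draws a removal warning rather than being rejected.
var known = map[string]bool{
	"CloudDualStackNodeIPs": true,
	"KMSv1":                 true,
	"NodeSwap":              false,
}

func parse(spec string) map[string]bool {
	gates := map[string]bool{}
	for _, kv := range strings.Split(spec, ",") {
		name, val, ok := strings.Cut(kv, "=")
		if !ok {
			fmt.Printf("W: malformed entry %q\n", kv)
			continue
		}
		enabled, err := strconv.ParseBool(val)
		if err != nil {
			fmt.Printf("W: bad value for %s: %v\n", name, err)
			continue
		}
		locked, recognized := known[name]
		if !recognized {
			fmt.Printf("W: unrecognized feature gate: %s\n", name)
			continue // the OpenShift-only gates above fall through here
		}
		if locked {
			fmt.Printf("W: Setting locked feature gate %s=%v. It will be removed in a future release.\n", name, enabled)
		}
		gates[name] = enabled
	}
	return gates
}

func main() {
	fmt.Println(parse("CloudDualStackNodeIPs=true,RouteAdvertisements=true,NodeSwap=false"))
}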
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664916 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664920 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664923 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664927 5003 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664930 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664934 5003 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664938 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664941 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664944 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664949 5003 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664952 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664957 5003 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664961 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664970 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664975 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664978 5003 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.664982 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665426 5003 flags.go:64] FLAG: --address="0.0.0.0" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665439 5003 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665451 5003 flags.go:64] FLAG: --anonymous-auth="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665457 5003 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665462 5003 flags.go:64] FLAG: --authentication-token-webhook="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665466 5003 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665473 5003 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665478 5003 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665482 5003 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 04 11:48:04 crc 
kubenswrapper[5003]: I0104 11:48:04.665488 5003 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665492 5003 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665496 5003 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665500 5003 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665506 5003 flags.go:64] FLAG: --cgroup-root="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665510 5003 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665514 5003 flags.go:64] FLAG: --client-ca-file="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665518 5003 flags.go:64] FLAG: --cloud-config="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665522 5003 flags.go:64] FLAG: --cloud-provider="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665531 5003 flags.go:64] FLAG: --cluster-dns="[]" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665538 5003 flags.go:64] FLAG: --cluster-domain="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665542 5003 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665546 5003 flags.go:64] FLAG: --config-dir="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665551 5003 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665557 5003 flags.go:64] FLAG: --container-log-max-files="5" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665563 5003 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665568 5003 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665573 5003 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665578 5003 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665583 5003 flags.go:64] FLAG: --contention-profiling="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665588 5003 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665592 5003 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665597 5003 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665601 5003 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665607 5003 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665611 5003 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665617 5003 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665622 5003 flags.go:64] FLAG: --enable-load-reader="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665626 5003 flags.go:64] FLAG: --enable-server="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665630 5003 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 04 
11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665638 5003 flags.go:64] FLAG: --event-burst="100" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665642 5003 flags.go:64] FLAG: --event-qps="50" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665647 5003 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665652 5003 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665656 5003 flags.go:64] FLAG: --eviction-hard="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665662 5003 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665666 5003 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665670 5003 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665674 5003 flags.go:64] FLAG: --eviction-soft="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665678 5003 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665682 5003 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665686 5003 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665690 5003 flags.go:64] FLAG: --experimental-mounter-path="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665694 5003 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665698 5003 flags.go:64] FLAG: --fail-swap-on="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665707 5003 flags.go:64] FLAG: --feature-gates="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665712 5003 flags.go:64] FLAG: --file-check-frequency="20s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665717 5003 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665721 5003 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665725 5003 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665729 5003 flags.go:64] FLAG: --healthz-port="10248" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665738 5003 flags.go:64] FLAG: --help="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665743 5003 flags.go:64] FLAG: --hostname-override="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665746 5003 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665751 5003 flags.go:64] FLAG: --http-check-frequency="20s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665755 5003 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665758 5003 flags.go:64] FLAG: --image-credential-provider-config="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665762 5003 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665767 5003 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665771 5003 flags.go:64] FLAG: --image-service-endpoint="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 
11:48:04.665775 5003 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665779 5003 flags.go:64] FLAG: --kube-api-burst="100" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665784 5003 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665788 5003 flags.go:64] FLAG: --kube-api-qps="50" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665792 5003 flags.go:64] FLAG: --kube-reserved="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665796 5003 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665800 5003 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665804 5003 flags.go:64] FLAG: --kubelet-cgroups="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665808 5003 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665812 5003 flags.go:64] FLAG: --lock-file="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665816 5003 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665820 5003 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665824 5003 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665831 5003 flags.go:64] FLAG: --log-json-split-stream="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665835 5003 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665839 5003 flags.go:64] FLAG: --log-text-split-stream="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665843 5003 flags.go:64] FLAG: --logging-format="text" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665847 5003 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665852 5003 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665855 5003 flags.go:64] FLAG: --manifest-url="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665860 5003 flags.go:64] FLAG: --manifest-url-header="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665870 5003 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665874 5003 flags.go:64] FLAG: --max-open-files="1000000" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665879 5003 flags.go:64] FLAG: --max-pods="110" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665884 5003 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665888 5003 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665892 5003 flags.go:64] FLAG: --memory-manager-policy="None" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665896 5003 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665901 5003 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665905 5003 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 
11:48:04.665909 5003 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665919 5003 flags.go:64] FLAG: --node-status-max-images="50" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665923 5003 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665927 5003 flags.go:64] FLAG: --oom-score-adj="-999" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665931 5003 flags.go:64] FLAG: --pod-cidr="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665935 5003 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665943 5003 flags.go:64] FLAG: --pod-manifest-path="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665947 5003 flags.go:64] FLAG: --pod-max-pids="-1" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665952 5003 flags.go:64] FLAG: --pods-per-core="0" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665956 5003 flags.go:64] FLAG: --port="10250" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665960 5003 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665964 5003 flags.go:64] FLAG: --provider-id="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665968 5003 flags.go:64] FLAG: --qos-reserved="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665973 5003 flags.go:64] FLAG: --read-only-port="10255" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665977 5003 flags.go:64] FLAG: --register-node="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665981 5003 flags.go:64] FLAG: --register-schedulable="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665985 5003 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665993 5003 flags.go:64] FLAG: --registry-burst="10" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.665997 5003 flags.go:64] FLAG: --registry-qps="5" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666001 5003 flags.go:64] FLAG: --reserved-cpus="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666005 5003 flags.go:64] FLAG: --reserved-memory="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666014 5003 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666032 5003 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666036 5003 flags.go:64] FLAG: --rotate-certificates="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666041 5003 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666045 5003 flags.go:64] FLAG: --runonce="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666049 5003 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666061 5003 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666065 5003 flags.go:64] FLAG: --seccomp-default="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666069 5003 flags.go:64] 
FLAG: --serialize-image-pulls="true" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666073 5003 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666077 5003 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666083 5003 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666087 5003 flags.go:64] FLAG: --storage-driver-password="root" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666092 5003 flags.go:64] FLAG: --storage-driver-secure="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666096 5003 flags.go:64] FLAG: --storage-driver-table="stats" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666101 5003 flags.go:64] FLAG: --storage-driver-user="root" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666105 5003 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666109 5003 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666114 5003 flags.go:64] FLAG: --system-cgroups="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666118 5003 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666125 5003 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666129 5003 flags.go:64] FLAG: --tls-cert-file="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666133 5003 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666140 5003 flags.go:64] FLAG: --tls-min-version="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666144 5003 flags.go:64] FLAG: --tls-private-key-file="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666148 5003 flags.go:64] FLAG: --topology-manager-policy="none" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666152 5003 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666156 5003 flags.go:64] FLAG: --topology-manager-scope="container" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666161 5003 flags.go:64] FLAG: --v="2" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666167 5003 flags.go:64] FLAG: --version="false" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666173 5003 flags.go:64] FLAG: --vmodule="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666178 5003 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666183 5003 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666323 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666328 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666332 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666336 5003 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666340 5003 feature_gate.go:330] unrecognized feature gate: 
PersistentIPsForVirtualization Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666343 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666347 5003 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666351 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666354 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666366 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666370 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666374 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666377 5003 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666381 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666385 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666388 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666392 5003 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666396 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666400 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666403 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666407 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666411 5003 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666415 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666418 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666421 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666425 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666428 5003 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666432 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666435 5003 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666438 5003 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666442 5003 feature_gate.go:330] 
unrecognized feature gate: ChunkSizeMiB Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666445 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666449 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666452 5003 feature_gate.go:330] unrecognized feature gate: Example Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666456 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666459 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666463 5003 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666466 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666470 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666474 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666478 5003 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666482 5003 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666486 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666489 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666494 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666504 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666508 5003 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666512 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666516 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666520 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666525 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666528 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666533 5003 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
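Between the warning blocks, the flags.go:64 dump records the effective value of every kubelet flag — note, for instance, --cgroup-driver="cgroupfs" there, which the CRI-reported systemd driver overrides later in this log. A hypothetical helper to collect that dump into a name-to-value map, e.g. for diffing against another node's startup:

// flagdump.go — hypothetical sketch: collect the effective flag values from
// the `flags.go:64] FLAG: --name="value"` lines of a kubelet log on stdin.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
	"sort"
)

var flagLine = regexp.MustCompile(`FLAG: (--[\w-]+)="(.*?)"`)

func main() {
	flags := map[string]string{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		// FindAll because damaged logs may carry several entries per line.
		for _, m := range flagLine.FindAllStringSubmatch(sc.Text(), -1) {
			flags[m[1]] = m[2]
		}
	}
	names := make([]string, 0, len(flags))
	for n := range flags {
		names = append(names, n)
	}
	sort.Strings(names)
	for _, n := range names {
		fmt.Printf("%s = %q\n", n, flags[n])
	}
}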
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666538 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666542 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666545 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666549 5003 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666553 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666556 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666560 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666564 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666568 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666573 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666577 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666581 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666585 5003 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666596 5003 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666600 5003 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666604 5003 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666607 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.666611 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.666624 5003 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.677453 5003 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.677494 5003 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677606 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677616 
5003 feature_gate.go:330] unrecognized feature gate: Example Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677623 5003 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677629 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677634 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677641 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677649 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677659 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677665 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677671 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677677 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677683 5003 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677691 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677698 5003 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677704 5003 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677711 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677716 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677722 5003 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677728 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677733 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677738 5003 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677744 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677749 5003 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677755 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677760 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677766 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677771 5003 feature_gate.go:330] unrecognized feature gate: 
VSphereControlPlaneMachineSet Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677777 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677782 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677787 5003 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677793 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677799 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677806 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677815 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677825 5003 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677834 5003 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677843 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677851 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677858 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677865 5003 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677870 5003 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677894 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677901 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677908 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677914 5003 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677920 5003 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677928 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677934 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677939 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677945 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677951 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677957 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform 
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677962 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677968 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677973 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677978 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677984 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677990 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.677995 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678001 5003 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678007 5003 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678049 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678062 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678072 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678079 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678085 5003 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678091 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678096 5003 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678105 5003 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
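The same unrecognized-gate warnings repeat in several near-identical blocks (timestamp runs .664*, .666*, .677*, .678*), evidently because the gate set is re-applied more than once during startup and each pass re-logs the full set. When reading a log like this, tallying duplicates first makes the unique content stand out; a sketch, assuming one klog entry per line:

// warncount.go — hypothetical sketch: tally repeated kubelet warnings by
// message, ignoring the timestamp/pid/file:line prefix, so repeated passes
// over the same gate set collapse into one counted line each.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// klog warning format: W0104 11:48:04.664629    5003 feature_gate.go:330] msg
var klogWarn = regexp.MustCompile(`W\d{4} [\d:.]+\s+\d+ [\w.]+:\d+\] (.+)$`)

func main() {
	counts := map[string]int{}
	order := []string{} // preserve first-seen order for stable output
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024)
	for sc.Scan() {
		if m := klogWarn.FindStringSubmatch(sc.Text()); m != nil {
			if counts[m[1]] == 0 {
				order = append(order, m[1])
			}
			counts[m[1]]++
		}
	}
	for _, msg := range order {
		fmt.Printf("%4dx %s\n", counts[msg], msg)
	}
}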
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678113 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678119 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.678130 5003 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678313 5003 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678324 5003 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678332 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678337 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678343 5003 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678349 5003 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678355 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678361 5003 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678366 5003 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678372 5003 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678379 5003 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678385 5003 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678391 5003 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678397 5003 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678403 5003 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678408 5003 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678414 5003 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678420 5003 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678427 5003 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678433 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 04 11:48:04 
crc kubenswrapper[5003]: W0104 11:48:04.678439 5003 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678444 5003 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678450 5003 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678458 5003 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678466 5003 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678474 5003 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678481 5003 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678486 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678492 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678498 5003 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678504 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678509 5003 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678515 5003 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678520 5003 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678526 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678533 5003 feature_gate.go:330] unrecognized feature gate: Example Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678538 5003 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678546 5003 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678552 5003 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678557 5003 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678564 5003 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
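Each pass ends with a feature_gate.go:386 "feature gates: {map[...]}" summary of the effective gate values, repeated verbatim above and below. A hypothetical decoder for that one line, handy for diffing the effective gates between two kubelet starts:

// gatemap.go — hypothetical sketch: decode a klog-printed Go map of the form
// "feature gates: {map[Name:true Name:false ...]}" into a map[string]bool.
package main

import (
	"fmt"
	"strings"
)

func parseGateMap(line string) map[string]bool {
	start := strings.Index(line, "{map[")
	end := strings.LastIndex(line, "]}")
	if start < 0 || end < start {
		return nil
	}
	out := map[string]bool{}
	// entries are space-separated Name:value pairs
	for _, kv := range strings.Fields(line[start+len("{map[") : end]) {
		if name, val, ok := strings.Cut(kv, ":"); ok {
			out[name] = val == "true"
		}
	}
	return out
}

func main() {
	line := `feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]}`
	fmt.Println(parseGateMap(line))
}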
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678570 5003 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678577 5003 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678583 5003 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678589 5003 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678595 5003 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678601 5003 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678606 5003 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678612 5003 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678618 5003 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678623 5003 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678629 5003 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678634 5003 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678640 5003 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678645 5003 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678651 5003 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678657 5003 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678663 5003 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678668 5003 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678674 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678680 5003 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678685 5003 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678691 5003 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678698 5003 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678705 5003 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678711 5003 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678717 5003 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678723 5003 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678728 5003 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678734 5003 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.678740 5003 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.678749 5003 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.679328 5003 server.go:940] "Client rotation is on, will bootstrap in background"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.682786 5003 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.682915 5003 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
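With the kubeconfig still valid, the kubelet starts client certificate rotation; the certificate_manager.go lines just below log an expiration of 2026-02-24 and a rotation deadline of 2025-12-16, i.e. a renewal point well before expiry rather than at it. A simplified illustration of that idea — the band constants here are assumptions for illustration; the real logic lives in k8s.io/client-go/util/certificate:

// rotation.go — illustrative sketch (NOT the kubelet's code): pick a rotation
// deadline at a jittered point late in the certificate's validity window, so
// a fleet of kubelets does not renew at the same instant.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	frac := 0.7 + 0.2*rand.Float64() // assumed band: [70%, 90%) of validity
	return notBefore.Add(time.Duration(float64(total) * frac))
}

func main() {
	// Assumed 1-year client cert ending at the expiration logged below.
	notAfter := time.Date(2026, time.February, 24, 5, 52, 8, 0, time.UTC)
	notBefore := notAfter.AddDate(-1, 0, 0)
	fmt.Println("deadline:", rotationDeadline(notBefore, notAfter))
}

The logged deadline (about 81% of the way through a one-year window) is consistent with a jitter of this shape.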
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.683649 5003 server.go:997] "Starting client certificate rotation"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.683676 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.683951 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-16 18:25:01.325660523 +0000 UTC
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.684036 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.690073 5003 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.691588 5003 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.5:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.692537 5003 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.702656 5003 log.go:25] "Validated CRI v1 runtime API"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.715573 5003 log.go:25] "Validated CRI v1 image API"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.716828 5003 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.719127 5003 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-04-11-43-31-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.719172 5003 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.741134 5003 manager.go:217] Machine: {Timestamp:2026-01-04 11:48:04.73957735 +0000 UTC m=+0.212607241 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:11f74ffa-1339-43e9-94e3-2ecf4c29070a BootID:7f249d9a-fde4-4cda-b9be-cd9f47dca495 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:82:87:b2 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:82:87:b2 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:e0:b0:1a Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:ce:91:ba Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:ea:5b:1a Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:90:2e:91 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:eb:8b:77 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:32:c6:99:5b:73:89 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:72:d9:21:d9:05:4e Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.741472 5003 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.741800 5003 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.742396 5003 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.742623 5003 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.742660 5003 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.742944 5003 topology_manager.go:138] "Creating topology manager with none policy"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.742960 5003 container_manager_linux.go:303] "Creating device plugin manager"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.743207 5003 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.743256 5003 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.743570 5003 state_mem.go:36] "Initialized new in-memory state store"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.743682 5003 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.744497 5003 kubelet.go:418] "Attempting to sync node with API server"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.744527 5003 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.744562 5003 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.744581 5003 kubelet.go:324] "Adding apiserver pod source"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.744598 5003 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.746199 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.5:6443: connect: connection refused
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.746204 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.5:6443: connect: connection refused
Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.746308 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.5:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.746339 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.5:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.746926 5003 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.747510 5003 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.748322 5003 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.748926 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.748957 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.748968 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.748980 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.748993 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.749000 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.749009 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.749039 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.749048 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.749056 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.749068 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.749115 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.749271 5003 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.749818 5003 server.go:1280] "Started kubelet"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.750005 5003 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.750819 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.5:6443: connect: connection refused
Jan 04 11:48:04 crc systemd[1]: Started Kubernetes Kubelet.
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.751749 5003 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.753638 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.753694 5003 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.753804 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 08:22:18.795573104 +0000 UTC
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.753926 5003 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 164h34m14.041651657s for next certificate rotation
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.754181 5003 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.754922 5003 server.go:460] "Adding debug handlers to kubelet server"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.756789 5003 volume_manager.go:287] "The desired_state_of_world populator starts"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.756825 5003 volume_manager.go:289] "Starting Kubelet Volume Manager"
Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.757186 5003 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.757923 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.5:6443: connect: connection refused
Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.757979 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.5:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.758005 5003 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.758945 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused" interval="200ms"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.759506 5003 factory.go:55] Registering systemd factory
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.759528 5003 factory.go:221] Registration of the systemd container factory successfully
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.759912 5003 factory.go:153] Registering CRI-O factory
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.759958 5003 factory.go:221] Registration of the crio container factory successfully
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.760070 5003 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.760109 5003 factory.go:103] Registering Raw factory
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.760130 5003 manager.go:1196] Started watching for new ooms in manager
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.760732 5003 manager.go:319] Starting recovery of all containers
Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.759934 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.5:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188784b362524a26 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-04 11:48:04.749781542 +0000 UTC m=+0.222811403,LastTimestamp:2026-01-04 11:48:04.749781542 +0000 UTC m=+0.222811403,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769596 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769661 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769673 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769683 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769695 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769705 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769716 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769748 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769759 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769767 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769775 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769783 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769793 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769806 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769818 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769834 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769844 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769864 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769874 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769885 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769894 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769905 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769915 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769948 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769982 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.769991 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770040 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770058 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770072 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770094 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770137 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770153 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770166 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770178 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770188 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770200 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770211 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770222 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770232 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770243 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770255 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770267 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770278 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770290 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770300 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770311 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770322 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770334 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770345 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770356 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770366 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770378 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770394 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770414 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770428 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770439 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770451 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770463 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770475 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770487 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770498 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770510 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770521 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770534 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770545 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770558 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770569 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770580 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770591 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770603 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770614 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770625 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770636 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770647 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770659 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770670 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770681 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770692 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770705 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770717 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770729 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770740 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770751 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770761 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770772 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770784 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770794 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770805 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770820 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770833 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770848 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770860 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770878 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770888 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770900 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770912 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770922 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770933 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770944 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770955 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770965 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770976 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770988 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.770999 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771031 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771044 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771056 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771067 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771078 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771091 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771128 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771142 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771152 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771165 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771177 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771188 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771200 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771212 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771223 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771235 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771245 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771256 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771268 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771281 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771292 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771310 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771323 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771337 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771349 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771360 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771372 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771540 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771555 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771566 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771578 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771589 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771601 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771613 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771630 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771642 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"
volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771652 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771664 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.771677 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772180 5003 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772203 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772215 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772226 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772240 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772513 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772527 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772539 5003 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772551 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772561 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772575 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772586 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772598 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772610 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772620 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772629 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772641 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772651 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772660 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772669 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772679 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772692 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772704 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772720 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772732 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772745 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772756 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772769 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772781 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772794 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772816 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772829 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772845 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772860 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772874 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772886 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772902 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772917 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772932 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772947 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772959 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772973 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.772985 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773004 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773130 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773148 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773162 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773175 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773192 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773205 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773217 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773231 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773244 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773257 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773270 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773284 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773298 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773311 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773324 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773337 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773350 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773363 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773375 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773390 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773408 5003 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773419 5003 reconstruct.go:97] "Volume reconstruction finished" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.773427 5003 reconciler.go:26] "Reconciler: start to sync state" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.788816 5003 manager.go:324] Recovery completed Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.802945 5003 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.803521 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.805328 5003 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.805387 5003 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.805429 5003 kubelet.go:2335] "Starting kubelet main sync loop" Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.805488 5003 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.806236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.806286 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.806312 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.807176 5003 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.807200 5003 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.807289 5003 state_mem.go:36] "Initialized new in-memory state store" Jan 04 11:48:04 crc kubenswrapper[5003]: W0104 11:48:04.807342 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.5:6443: connect: connection refused Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.807439 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": 
dial tcp 38.102.83.5:6443: connect: connection refused" logger="UnhandledError" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.815973 5003 policy_none.go:49] "None policy: Start" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.817281 5003 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.817309 5003 state_mem.go:35] "Initializing new in-memory state store" Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.857760 5003 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.881462 5003 manager.go:334] "Starting Device Plugin manager" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.881530 5003 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.881544 5003 server.go:79] "Starting device plugin registration server" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.882134 5003 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.882152 5003 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.882370 5003 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.882523 5003 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.882534 5003 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.890175 5003 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.906590 5003 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.906705 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.907750 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.907814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.907828 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.908069 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.908523 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.908613 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.909414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.909444 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.909458 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.909662 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.909847 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.909875 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.909923 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.909884 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.909977 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.910497 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.910547 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.910558 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.910662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.910716 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.910732 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.910792 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.910926 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.910972 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.912186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.912221 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.912186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.912235 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.912249 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.912278 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.912439 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.912556 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.912588 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.913099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.913131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.913138 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.913281 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.913310 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.913716 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.913741 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.913748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.913986 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.914003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.914026 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.959866 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused" interval="400ms" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.975704 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.975761 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.975818 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.975851 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.975888 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.975921 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.975950 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.975979 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.976013 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.976067 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.976094 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.976123 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.976208 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.976241 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.976296 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.982794 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.984187 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.984232 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.984247 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:04 crc kubenswrapper[5003]: I0104 11:48:04.984280 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 04 11:48:04 crc kubenswrapper[5003]: E0104 11:48:04.984797 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.5:6443: connect: connection refused" node="crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077539 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077617 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077669 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077734 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077773 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077809 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077827 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077777 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077912 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077961 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077844 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078034 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.077999 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078063 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078095 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078115 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 04 11:48:05 crc 
kubenswrapper[5003]: I0104 11:48:05.078141 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078215 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078269 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078332 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078336 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078401 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078445 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078498 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078713 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.078978 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.079123 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.079250 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.079543 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.079346 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.185003 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.187167 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.187231 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.187251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.187295 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: E0104 11:48:05.187746 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.5:6443: connect: connection refused" node="crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.242591 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.247581 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.275683 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: W0104 11:48:05.277578 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-32bac358cbf7c38b5e3fe01a4a3e9f435d87ee0757ac07bde0adfaf22a7fe417 WatchSource:0}: Error finding container 32bac358cbf7c38b5e3fe01a4a3e9f435d87ee0757ac07bde0adfaf22a7fe417: Status 404 returned error can't find the container with id 32bac358cbf7c38b5e3fe01a4a3e9f435d87ee0757ac07bde0adfaf22a7fe417
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.297240 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: W0104 11:48:05.297728 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-06e3cf346b765fe15baf014fe8a0a140e031b15c5b1b51aa11530140ce7f34d2 WatchSource:0}: Error finding container 06e3cf346b765fe15baf014fe8a0a140e031b15c5b1b51aa11530140ce7f34d2: Status 404 returned error can't find the container with id 06e3cf346b765fe15baf014fe8a0a140e031b15c5b1b51aa11530140ce7f34d2
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.303722 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: W0104 11:48:05.318145 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-18544c87b3c60e32a057bad10710cb973b53fd1b2722c2e102f9193909ed835e WatchSource:0}: Error finding container 18544c87b3c60e32a057bad10710cb973b53fd1b2722c2e102f9193909ed835e: Status 404 returned error can't find the container with id 18544c87b3c60e32a057bad10710cb973b53fd1b2722c2e102f9193909ed835e
Jan 04 11:48:05 crc kubenswrapper[5003]: W0104 11:48:05.319387 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-801307a89c7040b087d1944dc5dff752691b2200835d26b1db5b4af995c392a1 WatchSource:0}: Error finding container 801307a89c7040b087d1944dc5dff752691b2200835d26b1db5b4af995c392a1: Status 404 returned error can't find the container with id 801307a89c7040b087d1944dc5dff752691b2200835d26b1db5b4af995c392a1
Jan 04 11:48:05 crc kubenswrapper[5003]: E0104 11:48:05.360986 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused" interval="800ms"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.591133 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.594266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.594326 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.594338 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.594376 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: E0104 11:48:05.595072 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.5:6443: connect: connection refused" node="crc"
Jan 04 11:48:05 crc kubenswrapper[5003]: W0104 11:48:05.610386 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.5:6443: connect: connection refused
Jan 04 11:48:05 crc kubenswrapper[5003]: E0104 11:48:05.610479 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.5:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.752562 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.5:6443: connect: connection refused
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.831948 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d"}
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.832206 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"18544c87b3c60e32a057bad10710cb973b53fd1b2722c2e102f9193909ed835e"}
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.833715 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2"}
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.833747 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"06e3cf346b765fe15baf014fe8a0a140e031b15c5b1b51aa11530140ce7f34d2"}
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.833866 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.836213 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.836266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.836279 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.841571 5003 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d" exitCode=0
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.841671 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d"}
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.841715 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"68bd8ccd28348a5bdf92d93ba8fa726d0213e2ef5b224c334954bd6dd4dc716f"}
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.841873 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.843063 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.843102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.843118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.846577 5003 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde" exitCode=0
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.846661 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde"}
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.846688 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"32bac358cbf7c38b5e3fe01a4a3e9f435d87ee0757ac07bde0adfaf22a7fe417"}
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.846770 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.847771 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.847808 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.847821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.851488 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d"}
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.851599 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"801307a89c7040b087d1944dc5dff752691b2200835d26b1db5b4af995c392a1"}
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.851831 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.853601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.853675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:05 crc kubenswrapper[5003]: I0104 11:48:05.853693 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:05 crc kubenswrapper[5003]: W0104 11:48:05.925853 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.5:6443: connect: connection refused
Jan 04 11:48:05 crc kubenswrapper[5003]: E0104 11:48:05.925968 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.5:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:05 crc kubenswrapper[5003]: W0104 11:48:05.973676 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.5:6443: connect: connection refused
Jan 04 11:48:05 crc kubenswrapper[5003]: E0104 11:48:05.973822 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.5:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:06 crc kubenswrapper[5003]: W0104 11:48:06.060005 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.5:6443: connect: connection refused
Jan 04 11:48:06 crc kubenswrapper[5003]: E0104 11:48:06.060282 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.5:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:06 crc kubenswrapper[5003]: E0104 11:48:06.162346 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused" interval="1.6s"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.395572 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.396942 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.396974 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.396983 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.397005 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 04 11:48:06 crc kubenswrapper[5003]: E0104 11:48:06.397607 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.5:6443: connect: connection refused" node="crc"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.751653 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.5:6443: connect: connection refused
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.794541 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 04 11:48:06 crc kubenswrapper[5003]: E0104 11:48:06.796618 5003 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.5:6443: connect: connection refused" logger="UnhandledError"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.860252 5003 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d" exitCode=0
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.860381 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d"}
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.860558 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d"}
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.860787 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.860623 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe"}
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.861243 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0"}
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.862671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.862759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.862778 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.866376 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a"}
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.866442 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8"}
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.866465 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17"}
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.866536 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.868276 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2" exitCode=0
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.868427 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2"}
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.868596 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.868674 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.868608 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.868699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.870139 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.870195 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.870222 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.872265 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538"}
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.872338 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.872474 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.882190 5003 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538" exitCode=0
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.882571 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.882624 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.882641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.883003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.883123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.883147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.888396 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"4c0d47ed48809e56449b68ddfc86eb427381be87a8df60f581867c82d8c44890"}
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.888599 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.890370 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.890445 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:06 crc kubenswrapper[5003]: I0104 11:48:06.890466 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.901142 5003 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4" exitCode=0
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.901233 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4"}
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.901544 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.903064 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.903147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.903169 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.905216 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10"}
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.905318 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.905336 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655"}
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.905356 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.905412 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.905373 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b"}
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.905590 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69"}
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.907977 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.908421 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.908437 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.910608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.910643 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.910656 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:07 crc kubenswrapper[5003]: I0104 11:48:07.998319 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.000226 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.000315 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.000345 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.000404 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.912297 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84"}
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.912522 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.913939 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.913983 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.913997 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.921667 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4"}
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.921730 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4"}
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.921743 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f"}
Jan 04 11:48:08 crc kubenswrapper[5003]: I0104 11:48:08.921755 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6"}
Jan 04 11:48:09 crc kubenswrapper[5003]: I0104 11:48:09.934417 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c"}
Jan 04 11:48:09 crc kubenswrapper[5003]: I0104 11:48:09.934513 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:09 crc kubenswrapper[5003]: I0104 11:48:09.934636 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:09 crc kubenswrapper[5003]: I0104 11:48:09.934653 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:09 crc kubenswrapper[5003]: I0104 11:48:09.936255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:09 crc kubenswrapper[5003]: I0104 11:48:09.936316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:09 crc kubenswrapper[5003]: I0104 11:48:09.936339 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:09 crc kubenswrapper[5003]: I0104 11:48:09.937374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:09 crc kubenswrapper[5003]: I0104 11:48:09.937429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:09 crc kubenswrapper[5003]: I0104 11:48:09.937454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:10 crc kubenswrapper[5003]: I0104 11:48:10.937878 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:10 crc kubenswrapper[5003]: I0104 11:48:10.937988 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:10 crc kubenswrapper[5003]: I0104 11:48:10.939796 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:10 crc kubenswrapper[5003]: I0104 11:48:10.939881 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:10 crc kubenswrapper[5003]: I0104 11:48:10.939941 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:10 crc kubenswrapper[5003]: I0104 11:48:10.940571 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:10 crc kubenswrapper[5003]: I0104 11:48:10.940703 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:10 crc kubenswrapper[5003]: I0104 11:48:10.940814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.002390 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.663586 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.926268 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.926587 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.929559 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.929621 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.929639 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.934749 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.942795 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.942968 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.944353 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.945111 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.945186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.945204 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.946140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.946208 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:11 crc kubenswrapper[5003]: I0104 11:48:11.946229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:12 crc kubenswrapper[5003]: I0104 11:48:12.671064 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Jan 04 11:48:12 crc kubenswrapper[5003]: I0104 11:48:12.671350 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:12 crc kubenswrapper[5003]: I0104 11:48:12.673180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:12 crc kubenswrapper[5003]: I0104 11:48:12.673248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:12 crc kubenswrapper[5003]: I0104 11:48:12.673266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:12 crc kubenswrapper[5003]: I0104 11:48:12.946131 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:12 crc kubenswrapper[5003]: I0104 11:48:12.947794 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:12 crc kubenswrapper[5003]: I0104 11:48:12.947871 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:12 crc kubenswrapper[5003]: I0104 11:48:12.947896 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:13 crc kubenswrapper[5003]: I0104 11:48:13.192705 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:13 crc kubenswrapper[5003]: I0104 11:48:13.814892 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:13 crc kubenswrapper[5003]: I0104 11:48:13.815124 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:13 crc kubenswrapper[5003]: I0104 11:48:13.816477 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:13 crc kubenswrapper[5003]: I0104 11:48:13.816528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:13 crc kubenswrapper[5003]: I0104 11:48:13.816544 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:13 crc kubenswrapper[5003]: I0104 11:48:13.948604 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:13 crc kubenswrapper[5003]: I0104 11:48:13.950404 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:13 crc kubenswrapper[5003]: I0104 11:48:13.950492 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:13 crc kubenswrapper[5003]: I0104 11:48:13.950551 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:14 crc kubenswrapper[5003]: I0104 11:48:14.693967 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:14 crc kubenswrapper[5003]: E0104 11:48:14.890652 5003 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Jan 04 11:48:14 crc kubenswrapper[5003]: I0104 11:48:14.951710 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:14 crc kubenswrapper[5003]: I0104 11:48:14.953208 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:14 crc kubenswrapper[5003]: I0104 11:48:14.953276 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:14 crc kubenswrapper[5003]: I0104 11:48:14.953298 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:15 crc kubenswrapper[5003]: I0104 11:48:15.040228 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 04 11:48:15 crc kubenswrapper[5003]: I0104 11:48:15.040559 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:15 crc kubenswrapper[5003]: I0104 11:48:15.042331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:15 crc kubenswrapper[5003]: I0104 11:48:15.042386 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:15 crc kubenswrapper[5003]: I0104 11:48:15.042400 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:15 crc kubenswrapper[5003]: I0104 11:48:15.695759 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:15 crc kubenswrapper[5003]: I0104 11:48:15.955091 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:15 crc kubenswrapper[5003]: I0104 11:48:15.956764 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:15 crc kubenswrapper[5003]: I0104 11:48:15.956846 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:15 crc kubenswrapper[5003]: I0104 11:48:15.956875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:16 crc kubenswrapper[5003]: I0104 11:48:16.192907 5003 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 04 11:48:16 crc kubenswrapper[5003]: I0104 11:48:16.193006 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 04 11:48:17 crc kubenswrapper[5003]: I0104 11:48:17.529051 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Jan 04 11:48:17 crc kubenswrapper[5003]: I0104 11:48:17.529397 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:17 crc kubenswrapper[5003]: I0104 11:48:17.531351 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:17 crc kubenswrapper[5003]: I0104 11:48:17.531710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:17 crc kubenswrapper[5003]: I0104 11:48:17.531807 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:17 crc kubenswrapper[5003]: I0104 11:48:17.753449 5003 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Jan 04 11:48:17 crc kubenswrapper[5003]: E0104 11:48:17.764381 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s"
Jan 04 11:48:18 crc kubenswrapper[5003]: E0104 11:48:18.001919 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc"
Jan 04 11:48:18 crc kubenswrapper[5003]: W0104 11:48:18.032637 5003 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout
Jan 04 11:48:18 crc kubenswrapper[5003]: I0104 11:48:18.032801 5003 trace.go:236] Trace[1930603154]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Jan-2026 11:48:08.030) (total time: 10002ms):
Jan 04 11:48:18 crc kubenswrapper[5003]: Trace[1930603154]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (11:48:18.032)
Jan 04 11:48:18 crc kubenswrapper[5003]: Trace[1930603154]: [10.002342924s] [10.002342924s] END
Jan 04 11:48:18 crc kubenswrapper[5003]: E0104 11:48:18.032845 5003 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Jan 04 11:48:18 crc kubenswrapper[5003]: I0104 11:48:18.186277 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Jan 04 11:48:18 crc kubenswrapper[5003]: I0104 11:48:18.186424 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Jan 04 11:48:18 crc kubenswrapper[5003]: I0104 11:48:18.556137 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Jan 04 11:48:18 crc kubenswrapper[5003]: I0104 11:48:18.556230 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Jan 04 11:48:18 crc kubenswrapper[5003]: I0104 11:48:18.561389 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Jan 04 11:48:18 crc kubenswrapper[5003]: I0104 11:48:18.561473 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.203131 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.205779 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.205852 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.205871 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.205910 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 04 11:48:21 crc kubenswrapper[5003]: E0104 11:48:21.211860 5003 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.672572 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.672938 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.675248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.675305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.675326 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.678696 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.974444 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.976186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.976272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:21 crc kubenswrapper[5003]: I0104 11:48:21.976292 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:22 crc kubenswrapper[5003]: I0104 11:48:22.529123 5003 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.541795 5003 trace.go:236] Trace[630308277]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Jan-2026 11:48:08.629) (total time: 14912ms):
Jan 04 11:48:23 crc kubenswrapper[5003]: Trace[630308277]: ---"Objects listed" error: 14912ms (11:48:23.541)
Jan 04 11:48:23 crc kubenswrapper[5003]: Trace[630308277]: [14.912649502s] [14.912649502s] END
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.541846 5003 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.542988 5003 trace.go:236] Trace[426800030]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Jan-2026 11:48:08.774) (total time: 14768ms):
Jan 04 11:48:23 crc kubenswrapper[5003]: Trace[426800030]: ---"Objects listed" error: 14767ms (11:48:23.542)
Jan 04 11:48:23 crc kubenswrapper[5003]: Trace[426800030]: [14.768057958s] [14.768057958s] END
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.543087 5003 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.543187 5003 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.543336 5003 trace.go:236] Trace[833870928]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (04-Jan-2026 11:48:09.239) (total time: 14303ms):
Jan 04 11:48:23 crc kubenswrapper[5003]: Trace[833870928]: ---"Objects listed" error: 14303ms (11:48:23.543)
Jan 04 11:48:23 crc kubenswrapper[5003]: Trace[833870928]: [14.303514478s] [14.303514478s] END
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.543361 5003 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.565404 5003 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.689752 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.702314 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.758509 5003 apiserver.go:52] "Watching apiserver"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.761496 5003 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.762167 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"]
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.762635 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.763429 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.763528 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.765209 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.765388 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.765614 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.765772 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.765637 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.765873 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.765972 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.766254 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.767645 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.769083 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.769256 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.769642 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40834->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.769685 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40834->192.168.126.11:17697: read: connection reset by peer"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.769785 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37988->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.769802 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:37988->192.168.126.11:17697: read: connection reset by peer"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.770616 5003 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.770795 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.771102 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.771150 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.771170 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.771302 5003 reflector.go:368] Caches populated for *v1.ConfigMap from
object-"openshift-network-operator"/"iptables-alerter-script" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.790659 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.805242 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.818685 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.830347 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.840479 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\
"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.850812 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.858860 5003 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.865818 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.874640 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946213 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946262 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946292 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946313 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946334 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946351 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946370 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946397 5003 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946414 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946434 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946452 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946470 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946493 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946552 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946577 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946603 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.946633 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 04 11:48:23 crc 
kubenswrapper[5003]: I0104 11:48:23.947248 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.947251 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.947313 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.947191 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.947304 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.947530 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.947677 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.947769 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.947863 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.947898 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.948029 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.948091 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.948048 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.948127 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.948222 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.948601 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.948625 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.948672 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.948771 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.948774 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.948896 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.949154 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.949192 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.949267 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.949704 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.949780 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.951457 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.951616 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.951702 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.951750 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.951924 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.951979 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.952923 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953094 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953131 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953145 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953185 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953243 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953294 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953296 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953404 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953447 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953469 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953502 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953501 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953543 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953522 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953765 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953849 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953908 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953920 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954001 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954106 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954152 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954203 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954267 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: 
\"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954322 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954369 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954405 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954441 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954474 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954520 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954560 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954599 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954634 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954668 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" 
(UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954712 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954766 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954840 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954904 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954982 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955087 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955149 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955184 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955222 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955265 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955301 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955341 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955376 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955424 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.953877 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954070 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954331 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.954340 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955263 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955307 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955379 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.955479 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:24.45545907 +0000 UTC m=+19.928488911 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956588 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956615 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956635 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956654 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956675 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956695 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956702 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956717 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956798 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956822 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956828 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956848 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956872 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956905 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956928 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956946 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956995 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957051 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957070 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957090 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957112 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957134 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957154 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957173 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957191 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957211 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957231 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957258 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957276 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957298 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957316 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957336 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957314 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957498 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957353 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957650 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957693 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957735 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957773 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957814 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957847 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957880 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957916 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957952 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: 
\"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957989 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958057 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958095 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958132 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958172 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958210 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958248 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958287 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958336 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958379 5003 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958419 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958457 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958526 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958560 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958599 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958636 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958671 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958707 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958747 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: 
\"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958785 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958821 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958857 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958890 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958928 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958966 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958999 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959078 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959114 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959156 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: 
\"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959197 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959231 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959268 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959301 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959338 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959374 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959410 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959452 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959487 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959523 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959558 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959594 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959632 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959667 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959700 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959738 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959775 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959811 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959850 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959885 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959924 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959960 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959996 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960060 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960097 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960133 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960176 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960213 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960248 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960282 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960318 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960354 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960392 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960430 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960468 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960507 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960545 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960583 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960621 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960658 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960694 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960730 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960767 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960805 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960869 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960908 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960944 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960983 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961048 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961085 5003 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961125 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961164 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961203 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961240 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961281 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961363 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961407 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961493 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961550 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961605 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961650 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961691 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961728 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961771 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961809 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961853 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961887 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " 
pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961928 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957694 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957734 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955994 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.968172 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.968236 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955997 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956220 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.968313 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956098 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956471 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.956555 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.957898 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958096 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958271 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.955743 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958398 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.968553 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958453 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958676 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.958932 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959112 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959113 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959134 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959390 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959428 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959441 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959797 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.959952 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960058 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960421 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960468 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960786 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960949 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.960910 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961109 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961462 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961486 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961644 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.962039 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.962066 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.961977 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.962192 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.962542 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.962680 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.962744 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.963244 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.963602 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.963620 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.963883 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.963942 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.963628 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.964641 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.965057 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.965144 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.965236 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.965619 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.965766 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.966187 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.966306 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.966336 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.966354 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.966668 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.967402 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.967824 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.967825 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.968626 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.968643 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.968949 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.968693 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.968841 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.968912 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.969906 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.969928 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.970237 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.970248 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.970777 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.970979 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.971004 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.971262 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.971654 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.971649 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.971734 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.972541 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.972742 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.972727 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.972284 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.973165 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.973614 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.973775 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:24.473744396 +0000 UTC m=+19.946774267 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.975762 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.976095 5003 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.976475 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.977068 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.973951 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.973966 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.974518 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.974550 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.975276 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.975821 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.978077 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.978231 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.973903 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.977969 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.978262 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.977505 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.978398 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.978515 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:24.478491889 +0000 UTC m=+19.951521760 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.978771 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.977136 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.976645 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.978953 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979045 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979203 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979238 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979307 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979336 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979361 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979384 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979407 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979427 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979449 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979475 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979498 5003 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979520 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979540 5003 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979560 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979582 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979603 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979624 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979642 5003 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979664 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979684 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979704 5003 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979726 5003 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979746 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979765 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979786 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979805 5003 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979825 5003 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979845 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979864 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979886 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979907 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979929 5003 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979950 5003 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979970 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.979994 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980031 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980052 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980073 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980094 5003 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980115 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980135 5003 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980136 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980154 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980174 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980195 5003 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980200 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980216 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980234 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980256 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980275 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980339 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980530 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980603 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.980627 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981096 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981038 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981122 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981262 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981282 5003 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981301 5003 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981322 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981343 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981358 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981363 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981404 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981417 5003 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981429 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981440 5003 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981453 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981467 5003 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981477 5003 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981515 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981543 5003 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981556 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981569 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981585 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981609 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981625 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981640 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981653 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981668 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.977306 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981687 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981772 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981802 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981827 5003 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981855 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981878 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981899 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981920 5003 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981945 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981969 5003 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981991 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982093 5003 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982120 5003 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982141 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982162 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982184 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982205 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982228 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982249 5003 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982269 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982289 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982310 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982374 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982398 5003 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982426 5003 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982447 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982467 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982489 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982510 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982530 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982551 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982570 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982590 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982613 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982634 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982654 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982673 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982695 5003 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982715 5003 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982735 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982758 5003 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982777 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982799 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982822 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982851 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982871 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982900 5003 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982920 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982940 5003 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982963 5003 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.981944 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982068 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982985 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983117 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983145 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983170 5003 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983197 5003 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983225 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983250 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.982535 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983288 5003 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983310 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983329 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983349 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983371 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983380 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983391 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983414 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983919 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.983946 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.992505 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.992629 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.993050 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.992773 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.993301 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.993548 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.994908 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.994956 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.994979 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.995255 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.995584 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.995632 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.995651 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.996071 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:24.496045466 +0000 UTC m=+19.969075357 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:23 crc kubenswrapper[5003]: E0104 11:48:23.996100 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:24.496094477 +0000 UTC m=+19.969124308 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.996365 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.996451 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.998051 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Jan 04 11:48:23 crc kubenswrapper[5003]: I0104 11:48:23.998897 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.000754 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84" exitCode=255
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.000922 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84"}
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.001392 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.002384 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.002736 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.006161 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.006827 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.006932 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.007087 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.007199 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.007967 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.007475 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.009319 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.011261 5003 scope.go:117] "RemoveContainer" containerID="d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.011545 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.012427 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.013036 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.012995 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.013111 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.013135 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.013229 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.013668 5003 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.015135 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.015133 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.015854 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.016193 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.016284 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.016326 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.016686 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.016712 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.017231 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template".
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.017268 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.018393 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.019128 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.019259 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.019781 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.019845 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.020227 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.020294 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.020550 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.028232 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.040805 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.045397 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.051550 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.053074 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.070067 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.082471 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085386 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085489 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085564 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085588 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085610 5003 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085631 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085654 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085666 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085682 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085751 5003 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085770 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085765 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085785 5003 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085802 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085839 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085864 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085891 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085921 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.085948 5003 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086050 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 
11:48:24.086071 5003 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086119 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086139 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086159 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086179 5003 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086198 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086218 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086238 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086257 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086275 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086295 5003 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086315 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086336 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" 
DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086360 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086381 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086400 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086421 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086442 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086462 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086481 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086499 5003 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086518 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086537 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086557 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086576 5003 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086596 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: 
\"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086621 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086642 5003 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086661 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086680 5003 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086699 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086718 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086736 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086754 5003 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086772 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086797 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086822 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086849 5003 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.086876 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.090058 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.094083 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.098182 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.381211 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 04 11:48:24 crc kubenswrapper[5003]: W0104 11:48:24.395409 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-2aab87bb2f243885a586c3a7eab1f1c49def3b63faefa16fdbe84f169acd1f78 WatchSource:0}: Error finding container 2aab87bb2f243885a586c3a7eab1f1c49def3b63faefa16fdbe84f169acd1f78: Status 404 returned error can't find the container with id 2aab87bb2f243885a586c3a7eab1f1c49def3b63faefa16fdbe84f169acd1f78 Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.491540 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.491660 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.491743 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.491816 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.491863 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:25.491813975 +0000 UTC m=+20.964843846 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.491929 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:25.491914497 +0000 UTC m=+20.964944378 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.492155 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.492241 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:25.492227705 +0000 UTC m=+20.965257576 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.593312 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.593354 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.593510 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.593527 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.593539 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.593588 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:25.593572512 +0000 UTC m=+21.066602343 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.593713 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.593767 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.593792 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:24 crc kubenswrapper[5003]: E0104 11:48:24.593914 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:25.59387257 +0000 UTC m=+21.066902601 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.811641 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.812401 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.813541 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.814432 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.815326 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.817108 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.818131 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.818722 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.819371 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.819886 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.820416 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.821119 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.821658 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.822233 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.822769 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.823332 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.823317 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:24Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.823873 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.824290 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.824842 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.825468 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.825923 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.826583 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.827044 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.830824 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.831369 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.832205 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.833245 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.833705 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.834822 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.835284 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.836122 5003 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.836221 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.837787 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.838843 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.839252 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.840758 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.841407 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.842308 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.842903 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.844034 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.844467 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.844925 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:24Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.845427 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.846105 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.847095 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.847567 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.848568 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.849199 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.850323 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.850787 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.851707 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.852173 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.853050 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.853607 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.854112 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.861289 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:24Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.889099 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:24Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.915847 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:24Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.937572 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:24Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.962328 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:24Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:24 crc kubenswrapper[5003]: I0104 11:48:24.980818 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:24Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.006339 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3"}
Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.006393 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"2aab87bb2f243885a586c3a7eab1f1c49def3b63faefa16fdbe84f169acd1f78"}
Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.007899 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"415b6d4715664a32a7f4c32c645321274ef780c29aa43a77ac73b5d600938c04"}
Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.009576 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24"}
Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.009607 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d"}
Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.009618 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"6172f171c7b2113d39a9171a4777b6cec4a1cc3999b256ba75086c81341fa6f3"}
Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.012444 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.013858 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423"}
Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.027754 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.054590 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.069872 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.086491 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.104260 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.118133 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.141760 5003 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.199234 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.230454 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.253255 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.266226 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.280421 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.298538 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.312982 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.327548 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.344219 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.500094 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.500214 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.500287 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:27.500247261 +0000 UTC m=+22.973277102 (durationBeforeRetry 2s). 
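Every failed status patch above dies the same way: the kubelet cannot verify the serving certificate of the pod.network-node-identity.openshift.io webhook at 127.0.0.1:9743, whose validity ended 2025-08-24T17:21:41Z while the node clock reads 2026-01-04. A minimal Go sketch that reads the leaf certificate's validity window despite the expired chain (the address comes from the log; it assumes you run it on the node itself):

  package main

  import (
  	"crypto/tls"
  	"fmt"
  	"time"
  )

  func main() {
  	// InsecureSkipVerify lets the handshake complete even though the chain
  	// is expired; we only want to inspect the certificate, not trust it.
  	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
  	if err != nil {
  		fmt.Println("dial:", err)
  		return
  	}
  	defer conn.Close()
  	state := conn.ConnectionState()
  	if len(state.PeerCertificates) == 0 {
  		fmt.Println("no peer certificate presented")
  		return
  	}
  	leaf := state.PeerCertificates[0]
  	fmt.Printf("subject=%s notBefore=%s notAfter=%s expiredNow=%v\n",
  		leaf.Subject, leaf.NotBefore, leaf.NotAfter, time.Now().After(leaf.NotAfter))
  }

If the log is to be believed, notAfter would print as 2025-08-24T17:21:41Z, matching the x509 error string in each rejected patch.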
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.500372 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.500492 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.500500 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:27.500470957 +0000 UTC m=+22.973500808 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.500386 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.500541 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:27.500534519 +0000 UTC m=+22.973564360 (durationBeforeRetry 2s). 
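The TearDownAt failure for pvc-657094db-... is a different problem: the kubelet resolves CSI drivers by name among plugins that have registered over its plugin-registration socket directory, and kubevirt.io.hostpath-provisioner has not (re)registered since the kubelet restart. A sketch that lists the registration sockets; /var/lib/kubelet/plugins_registry is the kubelet default and is assumed unchanged here:

  package main

  import (
  	"fmt"
  	"os"
  )

  func main() {
  	// Each registered CSI/device plugin drops a gRPC socket in this
  	// directory; if nothing for the hostpath provisioner is listed, the
  	// "not found in the list of registered CSI drivers" error above is
  	// expected until the driver pod comes back up.
  	entries, err := os.ReadDir("/var/lib/kubelet/plugins_registry")
  	if err != nil {
  		fmt.Println("read:", err)
  		return
  	}
  	for _, e := range entries {
  		fmt.Println(e.Name())
  	}
  }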
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.601950 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.602088 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.602245 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.602270 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.602285 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.602311 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.602382 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.602439 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.602353 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:27.602334227 +0000 UTC m=+23.075364068 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.602542 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:27.602517092 +0000 UTC m=+23.075546933 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.806205 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.806327 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:25 crc kubenswrapper[5003]: I0104 11:48:25.806245 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.806403 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.806583 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:25 crc kubenswrapper[5003]: E0104 11:48:25.806694 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
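The repeated `object "..." not registered` failures are not API-server lookups: the kubelet's configmap and secret managers serve only objects referenced by pods already registered with them, and during this startup window the volume plugins are querying a cache that has not been primed yet. The objects can still be checked against the API server directly; a client-go sketch, where the kubeconfig path is a hypothetical placeholder (any admin kubeconfig for the cluster would do):

  package main

  import (
  	"context"
  	"fmt"

  	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
  	"k8s.io/client-go/kubernetes"
  	"k8s.io/client-go/tools/clientcmd"
  )

  func main() {
  	// Hypothetical kubeconfig location, for illustration only.
  	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
  	if err != nil {
  		fmt.Println("config:", err)
  		return
  	}
  	cs, err := kubernetes.NewForConfig(cfg)
  	if err != nil {
  		fmt.Println("client:", err)
  		return
  	}
  	// The configmap named in the mount errors above; it existing in the API
  	// while the mount fails is consistent with a kubelet-cache problem.
  	cm, err := cs.CoreV1().ConfigMaps("openshift-network-diagnostics").Get(
  		context.Background(), "kube-root-ca.crt", metav1.GetOptions{})
  	if err != nil {
  		fmt.Println("get:", err)
  		return
  	}
  	fmt.Println("kube-root-ca.crt exists; data keys:", len(cm.Data))
  }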
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:26 crc kubenswrapper[5003]: I0104 11:48:26.017647 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.523190 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.523377 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.523550 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.523567 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:31.523512281 +0000 UTC m=+26.996542162 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.523649 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:31.523621164 +0000 UTC m=+26.996651045 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.523710 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.523922 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.523994 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:31.523976313 +0000 UTC m=+26.997006184 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.569961 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.592805 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.595630 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.596343 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.613129 5003 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.616102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.616174 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.616194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.616310 5003 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.619891 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.624245 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:27 crc 
kubenswrapper[5003]: I0104 11:48:27.624314 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.624531 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.624561 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.624589 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.624620 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.624673 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:31.624648552 +0000 UTC m=+27.097678433 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.624675 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.624770 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.624884 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:31.624848197 +0000 UTC m=+27.097878078 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.626843 5003 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.627369 5003 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.628961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.629009 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.629054 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.629080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.629100 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:27Z","lastTransitionTime":"2026-01-04T11:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.644764 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.667298 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.673105 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.680391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.680482 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.680498 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.680524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.680564 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:27Z","lastTransitionTime":"2026-01-04T11:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.696406 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.700242 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.709908 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.709950 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.709961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.709981 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.709993 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:27Z","lastTransitionTime":"2026-01-04T11:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.732331 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.741982 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.748223 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.748292 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.748306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.748329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.748343 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:27Z","lastTransitionTime":"2026-01-04T11:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.764452 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.769717 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.778061 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.778124 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.778156 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.778184 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.778204 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:27Z","lastTransitionTime":"2026-01-04T11:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.786762 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.805977 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.806003 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.806059 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.805977 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.806311 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.806354 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.806479 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:27 crc kubenswrapper[5003]: E0104 11:48:27.806673 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.808121 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.808165 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.808179 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.808194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.808203 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:27Z","lastTransitionTime":"2026-01-04T11:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.816413 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.820420 5003 csr.go:261] certificate signing request csr-269ms is approved, waiting to be issued Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.833366 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.841063 5003 csr.go:257] certificate signing request csr-269ms is issued Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.851973 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.875426 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.899059 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.915627 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.915680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.915691 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.915708 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.915729 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:27Z","lastTransitionTime":"2026-01-04T11:48:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.920278 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.951158 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.966825 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:27 crc kubenswrapper[5003]: I0104 11:48:27.980453 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:27Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.018570 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.018599 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.018609 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.018624 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:28 crc 
kubenswrapper[5003]: I0104 11:48:28.018634 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:28Z","lastTransitionTime":"2026-01-04T11:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.023195 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496"} Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.048553 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\
\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.065845 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.082366 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: E0104 11:48:28.096528 5003 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.096838 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is 
after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.116631 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.121439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.121504 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.121519 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.121546 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.121563 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:28Z","lastTransitionTime":"2026-01-04T11:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.132084 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.151930 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.184346 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.224132 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.224252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.224291 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.224303 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.224323 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.224338 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:28Z","lastTransitionTime":"2026-01-04T11:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.289774 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-2kmwl"] Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.290199 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-2kmwl" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.292266 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.292398 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.293171 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.309244 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.326901 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.326946 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.326957 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.326975 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.327001 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:28Z","lastTransitionTime":"2026-01-04T11:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.328327 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.330247 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/70c77c4b-a997-4714-9708-d2b725bfe5c5-hosts-file\") pod \"node-resolver-2kmwl\" (UID: \"70c77c4b-a997-4714-9708-d2b725bfe5c5\") " pod="openshift-dns/node-resolver-2kmwl" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.330282 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6892\" (UniqueName: \"kubernetes.io/projected/70c77c4b-a997-4714-9708-d2b725bfe5c5-kube-api-access-v6892\") pod \"node-resolver-2kmwl\" (UID: \"70c77c4b-a997-4714-9708-d2b725bfe5c5\") " pod="openshift-dns/node-resolver-2kmwl" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.342186 5003 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.355987 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.373305 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.388367 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.408740 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"da
ta-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4
b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.430102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.430147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.430157 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.430173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.430186 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:28Z","lastTransitionTime":"2026-01-04T11:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.430660 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/70c77c4b-a997-4714-9708-d2b725bfe5c5-hosts-file\") pod \"node-resolver-2kmwl\" (UID: \"70c77c4b-a997-4714-9708-d2b725bfe5c5\") " pod="openshift-dns/node-resolver-2kmwl" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.430694 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6892\" (UniqueName: \"kubernetes.io/projected/70c77c4b-a997-4714-9708-d2b725bfe5c5-kube-api-access-v6892\") pod \"node-resolver-2kmwl\" (UID: \"70c77c4b-a997-4714-9708-d2b725bfe5c5\") " pod="openshift-dns/node-resolver-2kmwl" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.430765 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/70c77c4b-a997-4714-9708-d2b725bfe5c5-hosts-file\") pod \"node-resolver-2kmwl\" (UID: \"70c77c4b-a997-4714-9708-d2b725bfe5c5\") " pod="openshift-dns/node-resolver-2kmwl" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.433233 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.445266 5003 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.455905 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6892\" (UniqueName: \"kubernetes.io/projected/70c77c4b-a997-4714-9708-d2b725bfe5c5-kube-api-access-v6892\") pod \"node-resolver-2kmwl\" (UID: \"70c77c4b-a997-4714-9708-d2b725bfe5c5\") " pod="openshift-dns/node-resolver-2kmwl" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.459204 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.533123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.533188 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.533206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.533231 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.533249 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:28Z","lastTransitionTime":"2026-01-04T11:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.603664 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-2kmwl" Jan 04 11:48:28 crc kubenswrapper[5003]: W0104 11:48:28.620639 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70c77c4b_a997_4714_9708_d2b725bfe5c5.slice/crio-3cb45d05b12a5a838e435ef14e16fa6a970bb0a5559570c2f1ec203ca6c9b484 WatchSource:0}: Error finding container 3cb45d05b12a5a838e435ef14e16fa6a970bb0a5559570c2f1ec203ca6c9b484: Status 404 returned error can't find the container with id 3cb45d05b12a5a838e435ef14e16fa6a970bb0a5559570c2f1ec203ca6c9b484 Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.640531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.640840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.640858 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.640880 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.640895 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:28Z","lastTransitionTime":"2026-01-04T11:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.666905 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-qpwf5"] Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.667646 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.671386 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.672049 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.673110 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.673145 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.674330 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-np5qh"] Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.674391 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.678577 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2lwxt"] Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.678774 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.681073 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.681073 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.683089 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-rcgwp"] Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.683770 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.684490 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.689038 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.689698 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.689898 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.689905 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.689924 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.690262 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.690284 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.690291 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.690498 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.690345 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.690572 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.690679 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.696049 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.718435 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732501 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-socket-dir-parent\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732555 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-cni-dir\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732588 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-cnibin\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732613 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-slash\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732640 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-env-overrides\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732660 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-etc-kubernetes\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732682 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-openvswitch\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732708 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-system-cni-dir\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732737 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-var-lib-kubelet\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732760 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d785a1a-7eaf-4192-915a-49f478c2a59a-mcd-auth-proxy-config\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732783 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-systemd-units\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732803 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-var-lib-openvswitch\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732824 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-netd\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732847 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-os-release\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732872 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1d785a1a-7eaf-4192-915a-49f478c2a59a-proxy-tls\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732895 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-var-lib-cni-multus\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732921 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-daemon-config\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732942 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-systemd\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732969 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-os-release\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.732992 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-run-k8s-cni-cncf-io\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733031 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-bin\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733055 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-system-cni-dir\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733087 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-ovn\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: 
I0104 11:48:28.733109 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovn-node-metrics-cert\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733133 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhg5j\" (UniqueName: \"kubernetes.io/projected/1d785a1a-7eaf-4192-915a-49f478c2a59a-kube-api-access-xhg5j\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733165 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-hostroot\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733187 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-node-log\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733211 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-etc-openvswitch\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733237 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-log-socket\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733260 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-script-lib\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733284 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w66t5\" (UniqueName: \"kubernetes.io/projected/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-kube-api-access-w66t5\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733307 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvkrg\" (UniqueName: \"kubernetes.io/projected/a8de5487-0fcc-4344-a821-e485f3090ecb-kube-api-access-cvkrg\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: 
\"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733333 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-var-lib-cni-bin\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733365 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733388 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1d785a1a-7eaf-4192-915a-49f478c2a59a-rootfs\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733409 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-conf-dir\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733435 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733461 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-config\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733497 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-cni-binary-copy\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733521 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-run-netns\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733558 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" 
(UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-ovn-kubernetes\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733581 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a8de5487-0fcc-4344-a821-e485f3090ecb-cni-binary-copy\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733600 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-cnibin\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733629 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a8de5487-0fcc-4344-a821-e485f3090ecb-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733662 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-run-multus-certs\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733697 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-kubelet\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733717 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-netns\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.733736 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ss2hj\" (UniqueName: \"kubernetes.io/projected/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-kube-api-access-ss2hj\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.751540 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.754220 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.754278 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.754292 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.754315 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.754331 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:28Z","lastTransitionTime":"2026-01-04T11:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.772412 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.811356 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835414 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835459 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-config\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835478 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-conf-dir\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835506 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-cni-binary-copy\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835523 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-run-netns\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835539 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-ovn-kubernetes\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835554 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a8de5487-0fcc-4344-a821-e485f3090ecb-cni-binary-copy\") pod 
\"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835570 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a8de5487-0fcc-4344-a821-e485f3090ecb-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835591 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-run-multus-certs\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835605 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-kubelet\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835619 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-netns\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835637 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss2hj\" (UniqueName: \"kubernetes.io/projected/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-kube-api-access-ss2hj\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835663 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-cnibin\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835686 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-socket-dir-parent\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835703 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-cni-dir\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835718 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-cnibin\") pod \"multus-np5qh\" (UID: 
\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835733 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-slash\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835749 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-env-overrides\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835766 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-openvswitch\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835786 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-system-cni-dir\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835802 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-var-lib-kubelet\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835818 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-etc-kubernetes\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835832 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-systemd-units\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835850 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-var-lib-openvswitch\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835866 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-netd\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835884 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-os-release\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835905 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1d785a1a-7eaf-4192-915a-49f478c2a59a-proxy-tls\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835924 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d785a1a-7eaf-4192-915a-49f478c2a59a-mcd-auth-proxy-config\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835947 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-daemon-config\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835963 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-systemd\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835979 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-os-release\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.835996 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-run-k8s-cni-cncf-io\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836033 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-var-lib-cni-multus\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836049 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-bin\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836065 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-system-cni-dir\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836088 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-ovn\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836105 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovn-node-metrics-cert\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836120 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhg5j\" (UniqueName: \"kubernetes.io/projected/1d785a1a-7eaf-4192-915a-49f478c2a59a-kube-api-access-xhg5j\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836135 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-hostroot\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836151 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-node-log\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836166 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-etc-openvswitch\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836167 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-slash\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836241 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-log-socket\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836271 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836569 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-kubelet\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836606 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-conf-dir\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836742 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-env-overrides\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836804 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-openvswitch\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836841 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-system-cni-dir\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836866 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-var-lib-kubelet\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836889 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-etc-kubernetes\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836911 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-systemd-units\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836949 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-var-lib-openvswitch\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836971 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-netd\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.837033 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-os-release\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.837177 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-cni-binary-copy\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.837218 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-run-netns\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.837246 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-ovn-kubernetes\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.837701 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a8de5487-0fcc-4344-a821-e485f3090ecb-cni-binary-copy\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.838169 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-socket-dir-parent\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.838207 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-netns\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.838531 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-cnibin\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.838563 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-bin\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839105 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/a8de5487-0fcc-4344-a821-e485f3090ecb-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839175 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-run-multus-certs\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839295 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-os-release\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839353 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1d785a1a-7eaf-4192-915a-49f478c2a59a-mcd-auth-proxy-config\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839397 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-run-k8s-cni-cncf-io\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839424 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-var-lib-cni-multus\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839450 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-ovn\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839477 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-system-cni-dir\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839512 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-cnibin\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839706 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-cni-dir\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839733 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-systemd\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839778 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-node-log\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839812 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-hostroot\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.836181 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-log-socket\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839850 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-script-lib\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839871 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w66t5\" (UniqueName: \"kubernetes.io/projected/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-kube-api-access-w66t5\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839888 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvkrg\" (UniqueName: \"kubernetes.io/projected/a8de5487-0fcc-4344-a821-e485f3090ecb-kube-api-access-cvkrg\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839906 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-var-lib-cni-bin\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " 
pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839923 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839943 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1d785a1a-7eaf-4192-915a-49f478c2a59a-rootfs\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839951 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-multus-daemon-config\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.839994 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/1d785a1a-7eaf-4192-915a-49f478c2a59a-rootfs\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.840055 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-host-var-lib-cni-bin\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.840424 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-etc-openvswitch\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.843102 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-04 11:43:27 +0000 UTC, rotation deadline is 2026-10-10 01:38:00.360102667 +0000 UTC Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.843147 5003 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6685h49m31.516957993s for next certificate rotation Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.845301 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a8de5487-0fcc-4344-a821-e485f3090ecb-tuning-conf-dir\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.845677 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-config\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.847513 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-script-lib\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.847531 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1d785a1a-7eaf-4192-915a-49f478c2a59a-proxy-tls\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.855576 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovn-node-metrics-cert\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.859366 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"star
ted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.861447 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.861488 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.861502 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.861524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.861539 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:28Z","lastTransitionTime":"2026-01-04T11:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.866857 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ss2hj\" (UniqueName: \"kubernetes.io/projected/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-kube-api-access-ss2hj\") pod \"ovnkube-node-2lwxt\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt"
Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.869326 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvkrg\" (UniqueName: \"kubernetes.io/projected/a8de5487-0fcc-4344-a821-e485f3090ecb-kube-api-access-cvkrg\") pod \"multus-additional-cni-plugins-qpwf5\" (UID: \"a8de5487-0fcc-4344-a821-e485f3090ecb\") " pod="openshift-multus/multus-additional-cni-plugins-qpwf5"
Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.872676 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w66t5\" (UniqueName: \"kubernetes.io/projected/6e5d41d8-142e-4ca3-a20a-f6d338aaddf2-kube-api-access-w66t5\") pod \"multus-np5qh\" (UID: \"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\") " pod="openshift-multus/multus-np5qh"
Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.884768 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhg5j\" (UniqueName: \"kubernetes.io/projected/1d785a1a-7eaf-4192-915a-49f478c2a59a-kube-api-access-xhg5j\") pod \"machine-config-daemon-rcgwp\" (UID: \"1d785a1a-7eaf-4192-915a-49f478c2a59a\") " pod="openshift-machine-config-operator/machine-config-daemon-rcgwp"
Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.894618 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.917210 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.936493 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.951777 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.966680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.966732 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.966743 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.966760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.966771 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:28Z","lastTransitionTime":"2026-01-04T11:48:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.980339 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\
\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:28 crc kubenswrapper[5003]: I0104 11:48:28.990202 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:28.997575 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:28Z is after 
2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.015121 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-np5qh" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.022194 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.031069 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" event={"ID":"a8de5487-0fcc-4344-a821-e485f3090ecb","Type":"ContainerStarted","Data":"1861357e86333f6f01cb1bc43e0403c56243303e0917a3688148e6909ce71060"} Jan 04 11:48:29 crc kubenswrapper[5003]: W0104 11:48:29.031851 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e5d41d8_142e_4ca3_a20a_f6d338aaddf2.slice/crio-30dee2d5f4337462debc8428fb41b7e5dc3231a5bd03d6d4bea54058afb7bfcc WatchSource:0}: Error finding container 30dee2d5f4337462debc8428fb41b7e5dc3231a5bd03d6d4bea54058afb7bfcc: Status 404 returned error can't find the container with id 30dee2d5f4337462debc8428fb41b7e5dc3231a5bd03d6d4bea54058afb7bfcc Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.033325 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2kmwl" event={"ID":"70c77c4b-a997-4714-9708-d2b725bfe5c5","Type":"ContainerStarted","Data":"9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9"} Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.033409 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-2kmwl" event={"ID":"70c77c4b-a997-4714-9708-d2b725bfe5c5","Type":"ContainerStarted","Data":"3cb45d05b12a5a838e435ef14e16fa6a970bb0a5559570c2f1ec203ca6c9b484"} Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.034209 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.048976 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.063104 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.071805 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.071844 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.071858 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.071878 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.071894 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:29Z","lastTransitionTime":"2026-01-04T11:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.073717 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335
e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.090335 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: W0104 11:48:29.094833 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d785a1a_7eaf_4192_915a_49f478c2a59a.slice/crio-a17b43c5f532c54d7c36b136a500b21cac70cfbb9ce8a8f3de3285d03ad2a1ca WatchSource:0}: Error finding container a17b43c5f532c54d7c36b136a500b21cac70cfbb9ce8a8f3de3285d03ad2a1ca: Status 404 returned error can't find the container with id a17b43c5f532c54d7c36b136a500b21cac70cfbb9ce8a8f3de3285d03ad2a1ca Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.118834 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.147456 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.165035 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.176820 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.176865 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.176877 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.176897 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.176912 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:29Z","lastTransitionTime":"2026-01-04T11:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.180891 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.199713 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.217458 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.227686 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.239748 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.253840 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.266405 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.278750 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.280854 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.280892 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.280902 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.280921 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.280937 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:29Z","lastTransitionTime":"2026-01-04T11:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.295059 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.314378 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.330499 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.347538 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.366367 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.380872 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.383593 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.383644 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.383662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.383681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.383691 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:29Z","lastTransitionTime":"2026-01-04T11:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.401818 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.421911 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.441683 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.459389 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.473920 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.486580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.486610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.486620 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.486637 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:29 crc 
kubenswrapper[5003]: I0104 11:48:29.486647 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:29Z","lastTransitionTime":"2026-01-04T11:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.495026 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.589281 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.589333 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.589344 5003 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.589363 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.589374 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:29Z","lastTransitionTime":"2026-01-04T11:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.692551 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.692954 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.692969 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.692991 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.693008 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:29Z","lastTransitionTime":"2026-01-04T11:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.796245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.796288 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.796301 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.796320 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.796333 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:29Z","lastTransitionTime":"2026-01-04T11:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.805750 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.805805 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.805874 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:29 crc kubenswrapper[5003]: E0104 11:48:29.806028 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:29 crc kubenswrapper[5003]: E0104 11:48:29.806153 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:29 crc kubenswrapper[5003]: E0104 11:48:29.806331 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.899599 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.899662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.899676 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.899698 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:29 crc kubenswrapper[5003]: I0104 11:48:29.899714 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:29Z","lastTransitionTime":"2026-01-04T11:48:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.002520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.002575 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.002590 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.002610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.002623 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:30Z","lastTransitionTime":"2026-01-04T11:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.039031 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b" exitCode=0 Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.039107 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.039144 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"79ff0fefa09b629abc8ff4c48668385df9c13ea993bf4c6098145b3ee2df541f"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.043285 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.043323 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.043338 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"a17b43c5f532c54d7c36b136a500b21cac70cfbb9ce8a8f3de3285d03ad2a1ca"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.047125 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-np5qh" event={"ID":"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2","Type":"ContainerStarted","Data":"32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.047215 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-np5qh" event={"ID":"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2","Type":"ContainerStarted","Data":"30dee2d5f4337462debc8428fb41b7e5dc3231a5bd03d6d4bea54058afb7bfcc"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.050955 5003 generic.go:334] "Generic (PLEG): container finished" podID="a8de5487-0fcc-4344-a821-e485f3090ecb" containerID="81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802" exitCode=0 Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.051044 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" event={"ID":"a8de5487-0fcc-4344-a821-e485f3090ecb","Type":"ContainerDied","Data":"81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.060701 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.085205 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.108338 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.108414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.108427 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.108449 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.108464 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:30Z","lastTransitionTime":"2026-01-04T11:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.111087 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.144089 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z 
is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.167219 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.181968 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.195695 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-nlgkr"] Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.196227 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-nlgkr" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.199588 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.200800 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.201038 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.201884 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.202124 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.213249 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:30 crc 
kubenswrapper[5003]: I0104 11:48:30.213279 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.213288 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.213303 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.213315 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:30Z","lastTransitionTime":"2026-01-04T11:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.219766 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.238037 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.252797 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/74725edc-9566-4f57-81fc-1faf878f1ede-serviceca\") pod \"node-ca-nlgkr\" (UID: \"74725edc-9566-4f57-81fc-1faf878f1ede\") " pod="openshift-image-registry/node-ca-nlgkr" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.253175 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg64j\" (UniqueName: \"kubernetes.io/projected/74725edc-9566-4f57-81fc-1faf878f1ede-kube-api-access-tg64j\") pod \"node-ca-nlgkr\" (UID: \"74725edc-9566-4f57-81fc-1faf878f1ede\") " pod="openshift-image-registry/node-ca-nlgkr" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.253319 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/74725edc-9566-4f57-81fc-1faf878f1ede-host\") pod \"node-ca-nlgkr\" (UID: \"74725edc-9566-4f57-81fc-1faf878f1ede\") " pod="openshift-image-registry/node-ca-nlgkr" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.260134 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.272223 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.290246 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.308055 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.318531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.318573 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.318585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.318605 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.318618 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:30Z","lastTransitionTime":"2026-01-04T11:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.325623 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: 
I0104 11:48:30.341588 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.354206 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/74725edc-9566-4f57-81fc-1faf878f1ede-serviceca\") pod \"node-ca-nlgkr\" (UID: \"74725edc-9566-4f57-81fc-1faf878f1ede\") " pod="openshift-image-registry/node-ca-nlgkr" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.354963 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg64j\" (UniqueName: \"kubernetes.io/projected/74725edc-9566-4f57-81fc-1faf878f1ede-kube-api-access-tg64j\") pod \"node-ca-nlgkr\" (UID: \"74725edc-9566-4f57-81fc-1faf878f1ede\") " pod="openshift-image-registry/node-ca-nlgkr" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.354992 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/74725edc-9566-4f57-81fc-1faf878f1ede-host\") pod \"node-ca-nlgkr\" (UID: \"74725edc-9566-4f57-81fc-1faf878f1ede\") " pod="openshift-image-registry/node-ca-nlgkr" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.355088 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/74725edc-9566-4f57-81fc-1faf878f1ede-host\") pod \"node-ca-nlgkr\" (UID: \"74725edc-9566-4f57-81fc-1faf878f1ede\") " pod="openshift-image-registry/node-ca-nlgkr" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.355956 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/74725edc-9566-4f57-81fc-1faf878f1ede-serviceca\") pod \"node-ca-nlgkr\" (UID: \"74725edc-9566-4f57-81fc-1faf878f1ede\") " pod="openshift-image-registry/node-ca-nlgkr" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.358175 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.382231 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg64j\" (UniqueName: \"kubernetes.io/projected/74725edc-9566-4f57-81fc-1faf878f1ede-kube-api-access-tg64j\") pod \"node-ca-nlgkr\" (UID: \"74725edc-9566-4f57-81fc-1faf878f1ede\") " pod="openshift-image-registry/node-ca-nlgkr" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.387701 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z 
is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.401627 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.418700 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.425324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.425353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.425364 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.425379 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.425392 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:30Z","lastTransitionTime":"2026-01-04T11:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.435668 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.448675 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.463052 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc 
kubenswrapper[5003]: I0104 11:48:30.485093 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.498336 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.512158 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.514622 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-nlgkr"
Jan 04 11:48:30 crc kubenswrapper[5003]: W0104 11:48:30.530126 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74725edc_9566_4f57_81fc_1faf878f1ede.slice/crio-97c62d12323c122ae0c081b0bd247a356752f31f95a476bd2e5fb456c3d151c9 WatchSource:0}: Error finding container 97c62d12323c122ae0c081b0bd247a356752f31f95a476bd2e5fb456c3d151c9: Status 404 returned error can't find the container with id 97c62d12323c122ae0c081b0bd247a356752f31f95a476bd2e5fb456c3d151c9
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.532499 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.532533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.532548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.532567 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.532579 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:30Z","lastTransitionTime":"2026-01-04T11:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.536937 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.550290 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.562345 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.577333 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:30Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.636489 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.637322 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.637360 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.637384 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.637395 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:30Z","lastTransitionTime":"2026-01-04T11:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.742772 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.743359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.743375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.743397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.743413 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:30Z","lastTransitionTime":"2026-01-04T11:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.853537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.853613 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.853626 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.853648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.853667 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:30Z","lastTransitionTime":"2026-01-04T11:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.956502 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.956575 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.956590 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.956613 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:30 crc kubenswrapper[5003]: I0104 11:48:30.956632 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:30Z","lastTransitionTime":"2026-01-04T11:48:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.055990 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-nlgkr" event={"ID":"74725edc-9566-4f57-81fc-1faf878f1ede","Type":"ContainerStarted","Data":"5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.056088 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-nlgkr" event={"ID":"74725edc-9566-4f57-81fc-1faf878f1ede","Type":"ContainerStarted","Data":"97c62d12323c122ae0c081b0bd247a356752f31f95a476bd2e5fb456c3d151c9"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.058658 5003 generic.go:334] "Generic (PLEG): container finished" podID="a8de5487-0fcc-4344-a821-e485f3090ecb" containerID="82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f" exitCode=0
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.058709 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" event={"ID":"a8de5487-0fcc-4344-a821-e485f3090ecb","Type":"ContainerDied","Data":"82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.058687 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.058865 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.058895 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.058930 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.058990 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:31Z","lastTransitionTime":"2026-01-04T11:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.065384 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.065445 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.065460 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.065472 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.065484 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.065496 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.087252 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.112401 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.130051 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.147594 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.164606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.164654 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.164663 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.164680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.164677 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.164694 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:31Z","lastTransitionTime":"2026-01-04T11:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.180191 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.189523 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.203314 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.215078 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.228490 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.249834 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrid
es\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.263343 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.268217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.268296 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.268318 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.268353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.268373 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:31Z","lastTransitionTime":"2026-01-04T11:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.282418 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f
7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.297075 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.312845 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.337326 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z 
is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.351937 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.371056 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.371472 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.371544 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.371564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.371589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.371610 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:31Z","lastTransitionTime":"2026-01-04T11:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.387086 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.404989 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.419186 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin
\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.440806 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.456853 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.471911 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.474798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.474827 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.474839 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.474858 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:31 crc 
kubenswrapper[5003]: I0104 11:48:31.474870 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:31Z","lastTransitionTime":"2026-01-04T11:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.488380 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.502422 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.540722 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.568340 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.568495 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.568532 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.568657 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.568752 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:39.568727606 +0000 UTC m=+35.041757447 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.568774 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:39.568765257 +0000 UTC m=+35.041795098 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.568862 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.569221 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:39.569186668 +0000 UTC m=+35.042216539 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.579099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.579158 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.579173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.579194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.579207 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:31Z","lastTransitionTime":"2026-01-04T11:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.582353 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.626242 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.669999 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.670131 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.670248 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.670291 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.670308 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.670361 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.670385 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:39.670362261 +0000 UTC m=+35.143392112 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.670396 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.670421 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.670501 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:39.670477604 +0000 UTC m=+35.143507485 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.682485 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.682566 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.682585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.682617 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.682636 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:31Z","lastTransitionTime":"2026-01-04T11:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.689186 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:31Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.785280 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.785321 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.785332 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.785359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.785374 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:31Z","lastTransitionTime":"2026-01-04T11:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.806639 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.806699 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.806720 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.806762 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.806866 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:48:31 crc kubenswrapper[5003]: E0104 11:48:31.807008 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.888540 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.888591 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.888601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.888619 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.888634 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:31Z","lastTransitionTime":"2026-01-04T11:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.992181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.992235 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.992245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.992265 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:31 crc kubenswrapper[5003]: I0104 11:48:31.992287 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:31Z","lastTransitionTime":"2026-01-04T11:48:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.073664 5003 generic.go:334] "Generic (PLEG): container finished" podID="a8de5487-0fcc-4344-a821-e485f3090ecb" containerID="b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb" exitCode=0
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.073734 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" event={"ID":"a8de5487-0fcc-4344-a821-e485f3090ecb","Type":"ContainerDied","Data":"b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb"}
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.095590 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.095652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.095666 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.095689 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.095703 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:32Z","lastTransitionTime":"2026-01-04T11:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.096165 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.115845 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.138350 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.160601 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.180280 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.200006 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.200087 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.200100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.200121 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.200135 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:32Z","lastTransitionTime":"2026-01-04T11:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.202704 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.221082 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.239143 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.261864 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.289337 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z 
is after 2025-08-24T17:21:41Z" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.303671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.303738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.303758 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.303782 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.303797 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:32Z","lastTransitionTime":"2026-01-04T11:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.305092 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.330731 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c68
77441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.348302 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.364563 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.385697 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:32Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.406998 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.407089 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.407102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.407128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.407141 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:32Z","lastTransitionTime":"2026-01-04T11:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.510650 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.510735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.510760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.510783 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.510801 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:32Z","lastTransitionTime":"2026-01-04T11:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.614038 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.614102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.614117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.614140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.614156 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:32Z","lastTransitionTime":"2026-01-04T11:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.717963 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.718059 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.718080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.718108 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.718127 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:32Z","lastTransitionTime":"2026-01-04T11:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.824983 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.825098 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.825113 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.825141 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.825168 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:32Z","lastTransitionTime":"2026-01-04T11:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.928938 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.929047 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.929067 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.929094 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:32 crc kubenswrapper[5003]: I0104 11:48:32.929115 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:32Z","lastTransitionTime":"2026-01-04T11:48:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.032865 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.032934 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.032950 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.032977 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.032997 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:33Z","lastTransitionTime":"2026-01-04T11:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.082929 5003 generic.go:334] "Generic (PLEG): container finished" podID="a8de5487-0fcc-4344-a821-e485f3090ecb" containerID="248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7" exitCode=0 Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.083003 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" event={"ID":"a8de5487-0fcc-4344-a821-e485f3090ecb","Type":"ContainerDied","Data":"248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7"} Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.105601 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.134828 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.136803 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.136857 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.136880 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.136910 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.136937 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:33Z","lastTransitionTime":"2026-01-04T11:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.156492 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.180123 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.216540 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.241267 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.241316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.241331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.241351 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.241364 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:33Z","lastTransitionTime":"2026-01-04T11:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.252947 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.271392 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.292380 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.312363 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.327465 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.344962 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.345026 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.345043 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.345080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.345097 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:33Z","lastTransitionTime":"2026-01-04T11:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.345863 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.365474 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.383875 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.402691 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.418128 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:33Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.448664 5003 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.448702 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.448713 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.448731 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.448748 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:33Z","lastTransitionTime":"2026-01-04T11:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.551677 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.551748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.551768 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.551794 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.551813 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:33Z","lastTransitionTime":"2026-01-04T11:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.654623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.654679 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.654696 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.654723 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.654740 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:33Z","lastTransitionTime":"2026-01-04T11:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.757823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.757872 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.757883 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.757905 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.757919 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:33Z","lastTransitionTime":"2026-01-04T11:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.805737 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.805817 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.805897 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:33 crc kubenswrapper[5003]: E0104 11:48:33.805948 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:33 crc kubenswrapper[5003]: E0104 11:48:33.806116 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:33 crc kubenswrapper[5003]: E0104 11:48:33.806325 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.861995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.862078 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.862092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.862116 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.862131 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:33Z","lastTransitionTime":"2026-01-04T11:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.965968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.966547 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.966570 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.966601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:33 crc kubenswrapper[5003]: I0104 11:48:33.966621 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:33Z","lastTransitionTime":"2026-01-04T11:48:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.069585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.069639 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.069657 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.069682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.069701 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:34Z","lastTransitionTime":"2026-01-04T11:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.092098 5003 generic.go:334] "Generic (PLEG): container finished" podID="a8de5487-0fcc-4344-a821-e485f3090ecb" containerID="77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163" exitCode=0 Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.092244 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" event={"ID":"a8de5487-0fcc-4344-a821-e485f3090ecb","Type":"ContainerDied","Data":"77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.111219 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.124053 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd 
nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cn
i-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.140080 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.160155 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.173115 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.173172 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.173185 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.173206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.173221 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:34Z","lastTransitionTime":"2026-01-04T11:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.182493 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.200089 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.219256 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.257484 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.276189 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.280108 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.280150 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.280162 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.280183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.280200 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:34Z","lastTransitionTime":"2026-01-04T11:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.294555 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.313362 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.334915 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.346677 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.360782 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.374030 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.382691 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.382725 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.382735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.382751 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.382761 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:34Z","lastTransitionTime":"2026-01-04T11:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.389087 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.485467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.485514 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.485524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.485542 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.485553 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:34Z","lastTransitionTime":"2026-01-04T11:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.589272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.589352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.589374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.589408 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.589438 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:34Z","lastTransitionTime":"2026-01-04T11:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.685600 5003 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.701827 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.701875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.701886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.701905 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.701918 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:34Z","lastTransitionTime":"2026-01-04T11:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.806670 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.806744 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.806756 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.806783 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.806795 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:34Z","lastTransitionTime":"2026-01-04T11:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.826166 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.840683 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.862964 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.879039 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.899323 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.908978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.909042 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.909051 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.909068 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.909078 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:34Z","lastTransitionTime":"2026-01-04T11:48:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.933659 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e349
62a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.946377 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.969979 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"nam
e\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:
//11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:34 crc kubenswrapper[5003]: I0104 11:48:34.989155 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.004487 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.013171 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.013230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.013249 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.013275 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.013294 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:35Z","lastTransitionTime":"2026-01-04T11:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.022781 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvk
rg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.035391 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.050965 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.071490 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.092210 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.115103 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.115151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.115164 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.115183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.115196 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:35Z","lastTransitionTime":"2026-01-04T11:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.118867 5003 generic.go:334] "Generic (PLEG): container finished" podID="a8de5487-0fcc-4344-a821-e485f3090ecb" containerID="364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c" exitCode=0 Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.118924 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" event={"ID":"a8de5487-0fcc-4344-a821-e485f3090ecb","Type":"ContainerDied","Data":"364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c"} Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.141433 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.161257 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.181560 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.203353 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.219171 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.219205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.219436 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.219461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.219493 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.219513 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:35Z","lastTransitionTime":"2026-01-04T11:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.241520 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e349
62a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.257699 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.293678 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"nam
e\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o:
//11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.313062 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.322880 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.322924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.322936 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.322955 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.322967 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:35Z","lastTransitionTime":"2026-01-04T11:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.330330 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.351826 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\"
:\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.371292 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.386835 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.403387 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.416951 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:48:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.425983 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.426057 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.426074 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.426095 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.426111 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:35Z","lastTransitionTime":"2026-01-04T11:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.529860 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.529933 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.529952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.529978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.529997 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:35Z","lastTransitionTime":"2026-01-04T11:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.633699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.633749 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.633762 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.633783 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.633797 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:35Z","lastTransitionTime":"2026-01-04T11:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.737821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.737883 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.737900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.737927 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.737946 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:35Z","lastTransitionTime":"2026-01-04T11:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.806556 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.806729 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:35 crc kubenswrapper[5003]: E0104 11:48:35.806820 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.806900 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:35 crc kubenswrapper[5003]: E0104 11:48:35.807127 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:35 crc kubenswrapper[5003]: E0104 11:48:35.807443 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.841275 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.841334 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.841352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.841378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.841400 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:35Z","lastTransitionTime":"2026-01-04T11:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.944464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.944502 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.944512 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.944540 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:35 crc kubenswrapper[5003]: I0104 11:48:35.944551 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:35Z","lastTransitionTime":"2026-01-04T11:48:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.047973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.048378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.048595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.048784 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.048942 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:36Z","lastTransitionTime":"2026-01-04T11:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.128971 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" event={"ID":"a8de5487-0fcc-4344-a821-e485f3090ecb","Type":"ContainerStarted","Data":"b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8"} Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.153097 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.153186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.153208 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.153238 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.153259 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:36Z","lastTransitionTime":"2026-01-04T11:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.256759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.257099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.257109 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.257127 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.257140 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:36Z","lastTransitionTime":"2026-01-04T11:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.360185 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.360238 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.360251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.360273 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.360284 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:36Z","lastTransitionTime":"2026-01-04T11:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.464245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.464306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.464327 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.464350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.464368 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:36Z","lastTransitionTime":"2026-01-04T11:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.569412 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.569454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.569464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.569480 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.569490 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:36Z","lastTransitionTime":"2026-01-04T11:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.673910 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.674001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.674071 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.674117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.674144 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:36Z","lastTransitionTime":"2026-01-04T11:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.777737 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.777798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.777815 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.777842 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.777859 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:36Z","lastTransitionTime":"2026-01-04T11:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.881251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.881312 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.881326 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.881347 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.881363 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:36Z","lastTransitionTime":"2026-01-04T11:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.983952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.984107 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.984129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.984160 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:36 crc kubenswrapper[5003]: I0104 11:48:36.984179 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:36Z","lastTransitionTime":"2026-01-04T11:48:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.088060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.088142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.088168 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.088204 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.088228 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.146141 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"366b0f955d79a6a6a88678e57dc0f20ccc62a19ba0622fa71e164c089da18acb"} Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.146855 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.168802 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\
"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.187761 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.191431 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.191472 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.191519 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.191546 5003 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.191564 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.210128 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.210159 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7
f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.233978 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.259638 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\
\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366b0f955d79a6a6a88678e57dc0f20ccc62a19ba0622fa71e164c089da18acb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\
\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.274172 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.294552 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.295183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.295428 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.295613 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.295833 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.296056 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.315229 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.341245 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.368252 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.407518 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.412106 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.412138 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.412147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.412160 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.412171 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.445996 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.463894 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.479485 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.492776 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.505123 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.514851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.514900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.514917 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.514942 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.514953 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.520044 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.540846 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.554799 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.570461 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.592631 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366b0f955d79a6a6a88678e57dc0f20ccc62a19b
a0622fa71e164c089da18acb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.604236 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.617632 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.617676 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.617686 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.617701 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.617713 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.629756 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.647193 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.663548 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.693094 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.708376 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.725491 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.726222 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.726275 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.726297 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.726321 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.726338 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.746835 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.761484 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.806308 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.806781 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.806834 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:37 crc kubenswrapper[5003]: E0104 11:48:37.806968 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:48:37 crc kubenswrapper[5003]: E0104 11:48:37.807229 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:48:37 crc kubenswrapper[5003]: E0104 11:48:37.807567 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.818428 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.818485 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.818504 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.818533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.818553 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: E0104 11:48:37.834138 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.838648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.838701 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.838718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.838739 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.838768 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: E0104 11:48:37.858583 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.864926 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.864975 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.864992 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.865035 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.865057 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: E0104 11:48:37.884410 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:37Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.894388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.894414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.894423 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.894437 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.894448 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: E0104 11:48:37.917452 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.923201 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.923256 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.923266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.923280 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.923291 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:37 crc kubenswrapper[5003]: E0104 11:48:37.935162 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 04 11:48:37 crc kubenswrapper[5003]: E0104 11:48:37.935275 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.936883 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.936906 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.936915 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.936929 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:37 crc kubenswrapper[5003]: I0104 11:48:37.936938 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:37Z","lastTransitionTime":"2026-01-04T11:48:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.040183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.040231 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.040242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.040258 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.040268 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:38Z","lastTransitionTime":"2026-01-04T11:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.143541 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.143578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.143588 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.143602 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.143613 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:38Z","lastTransitionTime":"2026-01-04T11:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.149226 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.149404 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.179582 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.191605 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.197842 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.211333 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.230452 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.246912 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.246975 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.246995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.247049 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.247069 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:38Z","lastTransitionTime":"2026-01-04T11:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.250406 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.268861 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.282979 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.299389 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.318384 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.331857 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.350216 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.350271 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.350289 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.350315 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.350334 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:38Z","lastTransitionTime":"2026-01-04T11:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.362939 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366b0f955d79a6a6a88678e57dc0f20ccc62a19ba0622fa71e164c089da18acb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.389183 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.413829 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.431223 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.445313 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.453285 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.453341 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.453355 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.453375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:38 crc 
kubenswrapper[5003]: I0104 11:48:38.453389 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:38Z","lastTransitionTime":"2026-01-04T11:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.463670 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\
\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.478816 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.495589 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.514876 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53
eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.528095 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.541308 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.555323 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.555362 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.555374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.555391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.555401 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:38Z","lastTransitionTime":"2026-01-04T11:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.561706 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366b0f955d79a6a6a88678e57dc0f20ccc62a19ba0622fa71e164c089da18acb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.572386 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.589740 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.602377 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.614554 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.634635 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.655474 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.658595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.658641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.658651 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.658667 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.658678 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:38Z","lastTransitionTime":"2026-01-04T11:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.671244 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.688183 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.704654 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:38Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.762003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.762085 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.762099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.762119 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.762132 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:38Z","lastTransitionTime":"2026-01-04T11:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.864398 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.864433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.864441 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.864463 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.864476 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:38Z","lastTransitionTime":"2026-01-04T11:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.966712 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.966745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.966755 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.966770 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:38 crc kubenswrapper[5003]: I0104 11:48:38.966781 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:38Z","lastTransitionTime":"2026-01-04T11:48:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.068947 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.068998 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.069038 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.069069 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.069085 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:39Z","lastTransitionTime":"2026-01-04T11:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.156082 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/0.log" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.160355 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="366b0f955d79a6a6a88678e57dc0f20ccc62a19ba0622fa71e164c089da18acb" exitCode=1 Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.160409 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"366b0f955d79a6a6a88678e57dc0f20ccc62a19ba0622fa71e164c089da18acb"} Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.161657 5003 scope.go:117] "RemoveContainer" containerID="366b0f955d79a6a6a88678e57dc0f20ccc62a19ba0622fa71e164c089da18acb" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.171470 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.171508 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.171516 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.171531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.171541 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:39Z","lastTransitionTime":"2026-01-04T11:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.182176 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.200392 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.225590 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53
eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.242318 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.262924 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.274965 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.275034 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.275050 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.275068 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.275082 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:39Z","lastTransitionTime":"2026-01-04T11:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.294060 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://366b0f955d79a6a6a88678e57dc0f20ccc62a19ba0622fa71e164c089da18acb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://366b0f955d79a6a6a88678e57dc0f20ccc62a19ba0622fa71e164c089da18acb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:38.496360 6340 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:38.496419 6340 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:38.496437 6340 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:48:38.496439 6340 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:38.496454 6340 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:38.496460 6340 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:38.496488 6340 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:38.496491 6340 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:48:38.496500 6340 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:48:38.496507 6340 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:38.496527 6340 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:38.496533 6340 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0104 11:48:38.496567 6340 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:38.496592 6340 factory.go:656] Stopping watch factory\\\\nI0104 11:48:38.496609 6340 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.308546 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.329836 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f9118
50744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.350726 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.367317 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.379449 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.379484 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.379496 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.379520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.379544 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:39Z","lastTransitionTime":"2026-01-04T11:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.383564 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.397215 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.414106 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.431555 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.450907 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.483580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.483647 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.483667 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.483696 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.483715 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:39Z","lastTransitionTime":"2026-01-04T11:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.593117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.593656 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.593686 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.593719 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.593742 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:39Z","lastTransitionTime":"2026-01-04T11:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.663063 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.663168 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.663276 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.663284 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:48:55.663250534 +0000 UTC m=+51.136280385 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.663337 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:55.663317356 +0000 UTC m=+51.136347197 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.663457 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.663622 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.663689 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:55.663678025 +0000 UTC m=+51.136707886 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.697222 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.697284 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.697302 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.697332 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.697354 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:39Z","lastTransitionTime":"2026-01-04T11:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.764333 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.764445 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.764581 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.764600 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.764612 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.764603 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.764670 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:55.764655702 +0000 UTC m=+51.237685533 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.764671 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.764703 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.764815 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:48:55.764779805 +0000 UTC m=+51.237809816 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.800594 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.800627 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.800636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.800651 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.800662 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:39Z","lastTransitionTime":"2026-01-04T11:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.806472 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.806541 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.806591 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.806863 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.806845 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:48:39 crc kubenswrapper[5003]: E0104 11:48:39.806956 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.902733 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.902798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.902817 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.902849 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:39 crc kubenswrapper[5003]: I0104 11:48:39.902870 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:39Z","lastTransitionTime":"2026-01-04T11:48:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.007871 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.007911 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.007921 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.007937 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.007951 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:40Z","lastTransitionTime":"2026-01-04T11:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.110620 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.110652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.110661 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.110677 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.110691 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:40Z","lastTransitionTime":"2026-01-04T11:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.164849 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/1.log"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.165362 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/0.log"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.168400 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a" exitCode=1
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.168475 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a"}
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.168527 5003 scope.go:117] "RemoveContainer" containerID="366b0f955d79a6a6a88678e57dc0f20ccc62a19ba0622fa71e164c089da18acb"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.169990 5003 scope.go:117] "RemoveContainer" containerID="391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a"
Jan 04 11:48:40 crc kubenswrapper[5003]: E0104 11:48:40.170184 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.185449 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.197457 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.213957 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.214006 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.214039 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.214056 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.214067 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:40Z","lastTransitionTime":"2026-01-04T11:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.219325 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.239674 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.255243 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.268343 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.280061 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.288899 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.298469 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.307925 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.316210 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.316260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.316272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.316293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.316307 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:40Z","lastTransitionTime":"2026-01-04T11:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.320127 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z"
Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.335049 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.355410 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8
e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://366b0f955d79a6a6a88678e57dc0f20ccc62a19ba0622fa71e164c089da18acb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:39Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0104 11:48:38.496360 6340 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:38.496419 6340 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0104 11:48:38.496437 6340 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0104 11:48:38.496439 6340 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:38.496454 6340 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:38.496460 6340 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:38.496488 6340 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:38.496491 6340 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0104 11:48:38.496500 6340 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11:48:38.496507 6340 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0104 11:48:38.496527 6340 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0104 11:48:38.496533 6340 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0104 11:48:38.496567 6340 handler.go:208] Removed *v1.Node event handler 2\\\\nI0104 11:48:38.496592 6340 factory.go:656] Stopping watch factory\\\\nI0104 11:48:38.496609 6340 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"message\\\":\\\"l\\\\nI0104 11:48:40.118355 6476 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:40.118401 6476 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:40.118422 6476 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:40.118442 6476 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:40.118844 6476 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:40.119444 6476 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:40.120697 6476 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:40.120778 6476 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:40.120792 6476 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:40.120816 6476 factory.go:656] Stopping watch factory\\\\nI0104 11:48:40.120839 6476 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:40.120899 6476 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:40.120915 6476 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 
11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.369089 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.382190 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc4782
74c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:40Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.418981 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.419005 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.419037 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.419052 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.419061 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:40Z","lastTransitionTime":"2026-01-04T11:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.521282 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.521364 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.521382 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.521404 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.521420 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:40Z","lastTransitionTime":"2026-01-04T11:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.624473 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.624508 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.624516 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.624530 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.624540 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:40Z","lastTransitionTime":"2026-01-04T11:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.727680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.727716 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.727726 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.727781 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.727794 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:40Z","lastTransitionTime":"2026-01-04T11:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.829810 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.829862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.829878 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.829898 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.829915 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:40Z","lastTransitionTime":"2026-01-04T11:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.932579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.932631 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.932646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.932678 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:40 crc kubenswrapper[5003]: I0104 11:48:40.932693 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:40Z","lastTransitionTime":"2026-01-04T11:48:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.035537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.035583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.035592 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.035606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.035615 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:41Z","lastTransitionTime":"2026-01-04T11:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.138194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.138240 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.138255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.138276 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.138292 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:41Z","lastTransitionTime":"2026-01-04T11:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.174084 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/1.log" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.179042 5003 scope.go:117] "RemoveContainer" containerID="391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a" Jan 04 11:48:41 crc kubenswrapper[5003]: E0104 11:48:41.179192 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.197914 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.214615 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.230568 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53
eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.241273 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.241324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.241334 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.241350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.241360 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:41Z","lastTransitionTime":"2026-01-04T11:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.245498 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.259328 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.277386 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8
e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"message\\\":\\\"l\\\\nI0104 11:48:40.118355 6476 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:40.118401 6476 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:40.118422 6476 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:40.118442 6476 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:40.118844 6476 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:40.119444 6476 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:40.120697 6476 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:40.120778 6476 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:40.120792 6476 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:40.120816 6476 factory.go:656] Stopping watch factory\\\\nI0104 11:48:40.120839 6476 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:40.120899 6476 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:40.120915 6476 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 
11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.292250 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.324692 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.342575 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.344478 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.344539 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.344554 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.344604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.344619 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:41Z","lastTransitionTime":"2026-01-04T11:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.363417 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.379847 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.392957 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 
04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.406408 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.418721 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.442395 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.447157 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.447191 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.447200 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.447214 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.447223 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:41Z","lastTransitionTime":"2026-01-04T11:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.549417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.549482 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.549504 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.549533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.549555 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:41Z","lastTransitionTime":"2026-01-04T11:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.652077 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.652126 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.652145 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.652168 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.652184 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:41Z","lastTransitionTime":"2026-01-04T11:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.755265 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.755334 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.755352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.755376 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.755391 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:41Z","lastTransitionTime":"2026-01-04T11:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.806314 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.806408 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:41 crc kubenswrapper[5003]: E0104 11:48:41.806502 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.806566 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:41 crc kubenswrapper[5003]: E0104 11:48:41.806669 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:41 crc kubenswrapper[5003]: E0104 11:48:41.806817 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.857763 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.857834 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.857842 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.857859 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.857870 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:41Z","lastTransitionTime":"2026-01-04T11:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.927243 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb"] Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.927759 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.931950 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.931962 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.949949 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.960448 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.960516 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.960529 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.960548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.960563 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:41Z","lastTransitionTime":"2026-01-04T11:48:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.966193 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:41 crc kubenswrapper[5003]: I0104 11:48:41.982979 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.001900 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:41Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.014652 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.030240 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.055244 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"message\\\":\\\"l\\\\nI0104 11:48:40.118355 6476 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:40.118401 6476 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:40.118422 6476 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:40.118442 6476 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:40.118844 6476 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:40.119444 6476 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:40.120697 6476 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:40.120778 6476 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:40.120792 6476 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:40.120816 6476 factory.go:656] Stopping watch factory\\\\nI0104 11:48:40.120839 6476 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:40.120899 6476 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:40.120915 6476 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.063252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.063285 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.063295 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.063309 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.063319 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.076223 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.088403 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvb2d\" (UniqueName: \"kubernetes.io/projected/65e8a928-4671-4299-840a-812a83f36ba1-kube-api-access-cvb2d\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: \"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.088843 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65e8a928-4671-4299-840a-812a83f36ba1-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: 
\"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.089080 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65e8a928-4671-4299-840a-812a83f36ba1-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: \"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.089326 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65e8a928-4671-4299-840a-812a83f36ba1-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: \"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.093293 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.114824 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.136572 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.155469 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.166353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.166394 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.166402 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.166416 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.166426 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.174738 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.190630 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-cvb2d\" (UniqueName: \"kubernetes.io/projected/65e8a928-4671-4299-840a-812a83f36ba1-kube-api-access-cvb2d\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: \"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.190761 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65e8a928-4671-4299-840a-812a83f36ba1-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: \"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.190820 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65e8a928-4671-4299-840a-812a83f36ba1-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: \"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.190879 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65e8a928-4671-4299-840a-812a83f36ba1-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: \"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.191964 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/65e8a928-4671-4299-840a-812a83f36ba1-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: \"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.192193 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/65e8a928-4671-4299-840a-812a83f36ba1-env-overrides\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: \"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.196766 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/65e8a928-4671-4299-840a-812a83f36ba1-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: \"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.207813 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.223665 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.228197 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvb2d\" (UniqueName: \"kubernetes.io/projected/65e8a928-4671-4299-840a-812a83f36ba1-kube-api-access-cvb2d\") pod \"ovnkube-control-plane-749d76644c-9x5wb\" (UID: \"65e8a928-4671-4299-840a-812a83f36ba1\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.239792 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.244080 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" Jan 04 11:48:42 crc kubenswrapper[5003]: W0104 11:48:42.260730 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod65e8a928_4671_4299_840a_812a83f36ba1.slice/crio-215b0cb8e3f23c9f19a21805304d0458d48c4606f3d496706378fbbd8f2ee600 WatchSource:0}: Error finding container 215b0cb8e3f23c9f19a21805304d0458d48c4606f3d496706378fbbd8f2ee600: Status 404 returned error can't find the container with id 215b0cb8e3f23c9f19a21805304d0458d48c4606f3d496706378fbbd8f2ee600 Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.268681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.268876 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.269003 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.269199 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.269315 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.374842 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.374881 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.374891 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.374907 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.374918 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.477466 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.477537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.477548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.477560 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.477570 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.581310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.581349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.581362 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.581376 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.581388 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.665809 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-n2zwh"] Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.666440 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:42 crc kubenswrapper[5003]: E0104 11:48:42.666522 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.681632 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.685116 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.685157 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.685174 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.685197 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[5003]: 
I0104 11:48:42.685214 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.692075 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{
\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.708224 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-op
erator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.719235 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.736662 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.762268 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf50
5aaa0765c3da749625f0311a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"message\\\":\\\"l\\\\nI0104 11:48:40.118355 6476 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:40.118401 6476 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:40.118422 6476 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:40.118442 6476 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:40.118844 6476 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:40.119444 6476 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:40.120697 6476 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:40.120778 6476 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:40.120792 6476 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:40.120816 6476 factory.go:656] Stopping watch factory\\\\nI0104 11:48:40.120839 6476 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:40.120899 6476 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:40.120915 6476 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.784923 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb4
9fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.787943 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.787982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc 
kubenswrapper[5003]: I0104 11:48:42.788066 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.788093 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.788108 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.796835 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.796889 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9th4p\" (UniqueName: \"kubernetes.io/projected/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-kube-api-access-9th4p\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.809085 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.829524 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.847686 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.872979 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.891410 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.891472 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.891489 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.891513 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.891534 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.894295 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.897702 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9th4p\" (UniqueName: \"kubernetes.io/projected/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-kube-api-access-9th4p\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.897861 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:42 crc kubenswrapper[5003]: E0104 
11:48:42.898039 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:42 crc kubenswrapper[5003]: E0104 11:48:42.898113 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs podName:cd6de6ec-2a7c-4842-9d8a-ba4032acb50e nodeName:}" failed. No retries permitted until 2026-01-04 11:48:43.398090405 +0000 UTC m=+38.871120276 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs") pod "network-metrics-daemon-n2zwh" (UID: "cd6de6ec-2a7c-4842-9d8a-ba4032acb50e") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.915461 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.927442 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9th4p\" (UniqueName: \"kubernetes.io/projected/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-kube-api-access-9th4p\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.937928 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2
af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.956905 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.975207 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 
11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.992061 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:42Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.994369 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.994434 5003 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.994452 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.994479 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:42 crc kubenswrapper[5003]: I0104 11:48:42.994498 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:42Z","lastTransitionTime":"2026-01-04T11:48:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.097917 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.097983 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.098001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.098060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.098080 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.187839 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" event={"ID":"65e8a928-4671-4299-840a-812a83f36ba1","Type":"ContainerStarted","Data":"215b0cb8e3f23c9f19a21805304d0458d48c4606f3d496706378fbbd8f2ee600"} Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.201230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.201272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.201291 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.201310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.201324 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.304733 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.305033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.305045 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.305060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.305072 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.402986 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:43 crc kubenswrapper[5003]: E0104 11:48:43.403204 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:43 crc kubenswrapper[5003]: E0104 11:48:43.403314 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs podName:cd6de6ec-2a7c-4842-9d8a-ba4032acb50e nodeName:}" failed. No retries permitted until 2026-01-04 11:48:44.403289699 +0000 UTC m=+39.876319770 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs") pod "network-metrics-daemon-n2zwh" (UID: "cd6de6ec-2a7c-4842-9d8a-ba4032acb50e") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.407928 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.407981 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.407995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.408033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.408049 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.510909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.510955 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.510966 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.510985 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.510995 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.613966 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.614332 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.614461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.614606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.614700 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.717960 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.718060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.718085 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.718121 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.718175 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.806268 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.806358 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:43 crc kubenswrapper[5003]: E0104 11:48:43.806487 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:43 crc kubenswrapper[5003]: E0104 11:48:43.806619 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.806820 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:43 crc kubenswrapper[5003]: E0104 11:48:43.807140 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.820685 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.820752 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.820769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.820793 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.820811 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.924090 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.924452 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.924609 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.924748 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:43 crc kubenswrapper[5003]: I0104 11:48:43.924886 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:43Z","lastTransitionTime":"2026-01-04T11:48:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.028069 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.028136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.028156 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.028181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.028198 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.131506 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.131589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.131614 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.131647 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.131672 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.195910 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" event={"ID":"65e8a928-4671-4299-840a-812a83f36ba1","Type":"ContainerStarted","Data":"9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.195984 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" event={"ID":"65e8a928-4671-4299-840a-812a83f36ba1","Type":"ContainerStarted","Data":"bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.220082 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\"
:\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},
{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.234830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.234863 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.234873 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.234889 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.234899 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.244048 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.265126 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.284304 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.301721 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.319829 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.338347 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.338413 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.338431 5003 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.338458 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.338477 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.341322 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.353429 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.369203 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.393254 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.410319 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.414449 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh" 
Jan 04 11:48:44 crc kubenswrapper[5003]: E0104 11:48:44.414722 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:44 crc kubenswrapper[5003]: E0104 11:48:44.414811 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs podName:cd6de6ec-2a7c-4842-9d8a-ba4032acb50e nodeName:}" failed. No retries permitted until 2026-01-04 11:48:46.414788065 +0000 UTC m=+41.887817946 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs") pod "network-metrics-daemon-n2zwh" (UID: "cd6de6ec-2a7c-4842-9d8a-ba4032acb50e") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.427208 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/s
tatic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.442251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.442314 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.442333 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.442358 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 
11:48:44.442376 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.448509 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce
0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.461126 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.479687 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8
e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"message\\\":\\\"l\\\\nI0104 11:48:40.118355 6476 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:40.118401 6476 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:40.118422 6476 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:40.118442 6476 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:40.118844 6476 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:40.119444 6476 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:40.120697 6476 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:40.120778 6476 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:40.120792 6476 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:40.120816 6476 factory.go:656] Stopping watch factory\\\\nI0104 11:48:40.120839 6476 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:40.120899 6476 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:40.120915 6476 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 
11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.490052 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.501636 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 
11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.545288 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.545338 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.545353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.545371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.545386 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.648391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.648448 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.648462 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.648482 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.648498 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.754590 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.754962 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.755178 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.755351 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.755510 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.806230 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:44 crc kubenswrapper[5003]: E0104 11:48:44.806570 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.821817 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.843846 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.858704 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.858826 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.858850 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.858874 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.858891 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.866559 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.883658 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.904471 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.921653 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.940812 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.958126 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 
11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.960854 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.960900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.960916 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.960940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.960958 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:44Z","lastTransitionTime":"2026-01-04T11:48:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:44 crc kubenswrapper[5003]: I0104 11:48:44.979270 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 
11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.000293 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.014881 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.036362 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf50
5aaa0765c3da749625f0311a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"message\\\":\\\"l\\\\nI0104 11:48:40.118355 6476 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:40.118401 6476 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:40.118422 6476 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:40.118442 6476 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:40.118844 6476 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:40.119444 6476 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:40.120697 6476 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:40.120778 6476 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:40.120792 6476 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:40.120816 6476 factory.go:656] Stopping watch factory\\\\nI0104 11:48:40.120839 6476 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:40.120899 6476 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:40.120915 6476 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.051768 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.064218 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.064258 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.064273 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.064293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.064307 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.073191 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.088959 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.108350 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.134347 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.166984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.167058 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.167072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.167089 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.167102 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.270556 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.270654 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.270681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.270715 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.270735 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.374522 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.374605 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.374631 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.374675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.374700 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.477165 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.477240 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.477259 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.477285 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.477307 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.580836 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.580880 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.580895 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.580913 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.580928 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.683981 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.684090 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.684109 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.684131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.684150 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.787175 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.787238 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.787255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.787278 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.787297 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.806314 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:45 crc kubenswrapper[5003]: E0104 11:48:45.806422 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.806510 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.806540 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:45 crc kubenswrapper[5003]: E0104 11:48:45.806699 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:48:45 crc kubenswrapper[5003]: E0104 11:48:45.806801 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.891093 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.891159 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.891178 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.891205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.891223 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.994223 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.994276 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.994293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.994316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:45 crc kubenswrapper[5003]: I0104 11:48:45.994333 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:45Z","lastTransitionTime":"2026-01-04T11:48:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.097254 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.097303 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.097315 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.097332 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.097345 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.200291 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.200352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.200372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.200403 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.200425 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.304391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.304476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.304496 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.304523 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.304540 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.407836 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.407897 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.407914 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.407942 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.407958 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.437122 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:48:46 crc kubenswrapper[5003]: E0104 11:48:46.437392 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 04 11:48:46 crc kubenswrapper[5003]: E0104 11:48:46.437506 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs podName:cd6de6ec-2a7c-4842-9d8a-ba4032acb50e nodeName:}" failed. No retries permitted until 2026-01-04 11:48:50.437477851 +0000 UTC m=+45.910507732 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs") pod "network-metrics-daemon-n2zwh" (UID: "cd6de6ec-2a7c-4842-9d8a-ba4032acb50e") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.511759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.511824 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.511841 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.511864 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.511889 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.615964 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.616054 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.616079 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.616106 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.616123 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.719736 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.719817 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.719840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.719869 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.719889 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.805778 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:48:46 crc kubenswrapper[5003]: E0104 11:48:46.806072 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.822363 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.822442 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.822469 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.822497 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.822520 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.926091 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.926178 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.926198 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.926665 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:46 crc kubenswrapper[5003]: I0104 11:48:46.926726 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:46Z","lastTransitionTime":"2026-01-04T11:48:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.030891 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.030969 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.030987 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.031469 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.031733 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.141161 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.141234 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.141251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.141277 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.141299 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.244860 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.244940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.244959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.244984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.245001 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.348614 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.348672 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.348692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.348715 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.348735 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.452186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.452242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.452259 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.452285 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.452302 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.556317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.556393 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.556411 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.556435 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.556453 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.660278 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.660351 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.660375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.660417 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.660443 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.763202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.763268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.763293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.763321 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.763344 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.806665 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.806780 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.806888 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:47 crc kubenswrapper[5003]: E0104 11:48:47.806890 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:48:47 crc kubenswrapper[5003]: E0104 11:48:47.807121 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:48:47 crc kubenswrapper[5003]: E0104 11:48:47.807279 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.865922 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.866004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.866044 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.866063 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.866075 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.968931 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.968984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.968995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.969033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:47 crc kubenswrapper[5003]: I0104 11:48:47.969048 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:47Z","lastTransitionTime":"2026-01-04T11:48:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.072382 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.072439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.072461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.072488 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.072507 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.127008 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.127130 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.127154 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.127182 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.127205 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Jan 04 11:48:48 crc kubenswrapper[5003]: E0104 11:48:48.149843 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.156360 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.156426 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.156443 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.156468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.156486 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:48 crc kubenswrapper[5003]: E0104 11:48:48.192852 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:48Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.198387 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.198454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.198476    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.198506    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.198526    5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.220720    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.220785    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.220807    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.220834    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.220853    5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.248155    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.248211    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.248222    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.248239    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.248250    5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:48 crc kubenswrapper[5003]: E0104 11:48:48.275982    5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.277722    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.277771    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.277779    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.277794    5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.277806    5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:48Z","lastTransitionTime":"2026-01-04T11:48:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:48 crc kubenswrapper[5003]: I0104 11:48:48.806497    5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:48:48 crc kubenswrapper[5003]: E0104 11:48:48.806667    5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.806046    5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.806134    5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:49 crc kubenswrapper[5003]: E0104 11:48:49.806166    5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.806134    5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:49 crc kubenswrapper[5003]: E0104 11:48:49.806344    5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:48:49 crc kubenswrapper[5003]: E0104 11:48:49.806503    5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.833386 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.833629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.833720 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.833789 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.833847 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.936133 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.936461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.936538 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.936613 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:49 crc kubenswrapper[5003]: I0104 11:48:49.936695 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:49Z","lastTransitionTime":"2026-01-04T11:48:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[the node-status cycle repeats at ~100 ms intervals from 11:48:49.833 through 11:48:50.451]
Jan 04 11:48:50 crc kubenswrapper[5003]: I0104 11:48:50.478951 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:48:50 crc kubenswrapper[5003]: E0104 11:48:50.479137 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 04 11:48:50 crc kubenswrapper[5003]: E0104 11:48:50.479213 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs podName:cd6de6ec-2a7c-4842-9d8a-ba4032acb50e nodeName:}" failed. No retries permitted until 2026-01-04 11:48:58.479192027 +0000 UTC m=+53.952221868 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs") pod "network-metrics-daemon-n2zwh" (UID: "cd6de6ec-2a7c-4842-9d8a-ba4032acb50e") : object "openshift-multus"/"metrics-daemon-secret" not registered
[the node-status cycle repeats from 11:48:50.556 through 11:48:50.762]
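The nestedpendingoperations entry shows the volume manager's retry policy: the failed MountVolume may not be retried for another 8s (next attempt no earlier than 11:48:58). An 8s delay is consistent with a doubling backoff; a minimal sketch under that assumption (the initial delay, factor, and cap here are illustrative, not the kubelet's exact tuning):

```go
// backoff.go - a minimal sketch of a doubling retry delay like the
// durationBeforeRetry visible in the log entry above.
package main

import (
	"fmt"
	"time"
)

type backoff struct {
	initial, max, current time.Duration
}

// next returns the delay to wait before the following retry, doubling the
// stored delay up to the cap.
func (b *backoff) next() time.Duration {
	if b.current == 0 {
		b.current = b.initial
		return b.current
	}
	b.current *= 2
	if b.current > b.max {
		b.current = b.max
	}
	return b.current
}

func main() {
	b := &backoff{initial: 500 * time.Millisecond, max: 2 * time.Minute}
	for i := 1; i <= 6; i++ {
		fmt.Printf("attempt %d: wait %v\n", i, b.next())
	}
	// Prints 500ms, 1s, 2s, 4s, 8s, 16s - under these assumed parameters,
	// the fifth consecutive failure lands on the 8s delay seen in the log.
}
```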
Jan 04 11:48:50 crc kubenswrapper[5003]: I0104 11:48:50.806663 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:48:50 crc kubenswrapper[5003]: E0104 11:48:50.806921 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
[the node-status cycle repeats from 11:48:50.865 through 11:48:51.796]
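Each setters.go:603 entry in the repeating cycle serializes the node's Ready condition: lastHeartbeatTime is bumped on every status sync, while lastTransitionTime is only meant to move when the status value itself flips. A minimal sketch that reproduces the condition JSON using the upstream API types (assumes the k8s.io/api and k8s.io/apimachinery modules are available):

```go
// condition.go - a minimal sketch of the Ready=False condition the kubelet
// keeps re-recording; the JSON field names match the log entries.
package main

import (
	"encoding/json"
	"fmt"
	"time"

	v1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	now := metav1.NewTime(time.Now())
	cond := v1.NodeCondition{
		Type:               v1.NodeReady,
		Status:             v1.ConditionFalse,
		LastHeartbeatTime:  now, // refreshed on every sync
		LastTransitionTime: now, // only moves when Status actually changes
		Reason:             "KubeletNotReady",
		Message:            "container runtime network not ready",
	}
	out, _ := json.Marshal(cond)
	fmt.Println(string(out))
}
```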
Jan 04 11:48:51 crc kubenswrapper[5003]: I0104 11:48:51.806105 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:51 crc kubenswrapper[5003]: I0104 11:48:51.806143 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:51 crc kubenswrapper[5003]: I0104 11:48:51.806238 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:51 crc kubenswrapper[5003]: E0104 11:48:51.806281 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:48:51 crc kubenswrapper[5003]: E0104 11:48:51.806470 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:48:51 crc kubenswrapper[5003]: E0104 11:48:51.806616 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[the node-status cycle repeats from 11:48:51.899 through 11:48:52.726]
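All three diagnostics/console pods fail the same sync step: sandbox creation is gated on the runtime's NetworkReady condition. A minimal sketch of that gate; RuntimeCondition and RuntimeStatus here are hypothetical stand-ins for the runtime status a kubelet polls, and only the gating logic is the point:

```go
// netready.go - a minimal sketch of the readiness gate behind the
// "network is not ready" sync errors.
package main

import (
	"errors"
	"fmt"
)

type RuntimeCondition struct {
	Type    string // e.g. "RuntimeReady", "NetworkReady"
	Status  bool
	Reason  string
	Message string
}

type RuntimeStatus struct{ Conditions []RuntimeCondition }

// networkReady returns an error shaped like the log message when the
// NetworkReady condition is absent or false.
func networkReady(s RuntimeStatus) error {
	for _, c := range s.Conditions {
		if c.Type == "NetworkReady" {
			if c.Status {
				return nil
			}
			return fmt.Errorf("container runtime network not ready: NetworkReady=false reason:%s message:%s", c.Reason, c.Message)
		}
	}
	return errors.New("container runtime network not ready: NetworkReady condition missing")
}

func main() {
	status := RuntimeStatus{Conditions: []RuntimeCondition{{
		Type:    "NetworkReady",
		Status:  false,
		Reason:  "NetworkPluginNotReady",
		Message: "Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}}}
	if err := networkReady(status); err != nil {
		// Sandbox creation would be skipped and the sync retried,
		// which is exactly the loop the log shows for all three pods.
		fmt.Println("Error syncing pod, skipping:", err)
	}
}
```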
Jan 04 11:48:52 crc kubenswrapper[5003]: I0104 11:48:52.805952 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:48:52 crc kubenswrapper[5003]: E0104 11:48:52.806379 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
[the node-status cycle repeats from 11:48:52.830 through 11:48:53.762]
Jan 04 11:48:53 crc kubenswrapper[5003]: I0104 11:48:53.806499 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:53 crc kubenswrapper[5003]: I0104 11:48:53.806544 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:53 crc kubenswrapper[5003]: I0104 11:48:53.806595 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:53 crc kubenswrapper[5003]: E0104 11:48:53.806704 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:48:53 crc kubenswrapper[5003]: E0104 11:48:53.806893 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:48:53 crc kubenswrapper[5003]: E0104 11:48:53.807007 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:48:53 crc kubenswrapper[5003]: I0104 11:48:53.808118 5003 scope.go:117] "RemoveContainer" containerID="391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a"
[the node-status cycle repeats from 11:48:53.872 through 11:48:54.080]
Has your network provider started?"} Jan 04 11:48:53 crc kubenswrapper[5003]: I0104 11:48:53.976040 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:53 crc kubenswrapper[5003]: I0104 11:48:53.976440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:53 crc kubenswrapper[5003]: I0104 11:48:53.976460 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:53 crc kubenswrapper[5003]: I0104 11:48:53.976487 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:53 crc kubenswrapper[5003]: I0104 11:48:53.976506 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:53Z","lastTransitionTime":"2026-01-04T11:48:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.080118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.080192 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.080215 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.080240 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.080257 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.183439 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.183510 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.183533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.183564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.183586 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.235588 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/1.log" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.239277 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4"} Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.239421 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.256943 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.272668 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.286376 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.286423 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.286437 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.286453 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.286464 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.286928 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.304233 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e
9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.317617 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.331221 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.347808 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.367198 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.380978 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.389100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.389132 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.389142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.389158 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.389170 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.398197 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.507474 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.510382 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.510420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.510430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.510448 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.510457 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.520997 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.537488 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.552254 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.568029 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4d
b94250610454a284dfab69c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"message\\\":\\\"l\\\\nI0104 11:48:40.118355 6476 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:40.118401 6476 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:40.118422 6476 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:40.118442 6476 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:40.118844 6476 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:40.119444 6476 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:40.120697 6476 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:40.120778 6476 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:40.120792 6476 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:40.120816 6476 factory.go:656] Stopping watch factory\\\\nI0104 11:48:40.120839 6476 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:40.120899 6476 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:40.120915 6476 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 
11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.577142 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.587784 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 
11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.603549 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.612574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.612601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.612610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.612625 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.612635 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.715176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.715235 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.715248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.715273 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.715290 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.806350 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:54 crc kubenswrapper[5003]: E0104 11:48:54.806502 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.817739 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.817796 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.817809 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.817825 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.817835 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.821550 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.832994 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 
11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.846486 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.896452 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.919900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.919956 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.919966 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.919981 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.919992 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:54Z","lastTransitionTime":"2026-01-04T11:48:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.934348 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.955406 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"message\\\":\\\"l\\\\nI0104 11:48:40.118355 6476 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:40.118401 6476 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:40.118422 6476 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:40.118442 6476 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:40.118844 6476 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:40.119444 6476 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:40.120697 6476 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:40.120778 6476 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:40.120792 6476 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:40.120816 6476 factory.go:656] Stopping watch factory\\\\nI0104 11:48:40.120839 6476 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:40.120899 6476 handler.go:208] Removed *v1.NetworkPolicy event handler 
4\\\\nI0104 11:48:40.120915 6476 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\
\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.974562 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.986906 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:54 crc kubenswrapper[5003]: I0104 11:48:54.996940 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:54Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.009709 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.021759 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.022458 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.022493 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.022505 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.022526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.022537 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.034508 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.045154 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.054597 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.056958 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.070096 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.082289 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.096872 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.108985 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.123375 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.125349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.125412 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc 
kubenswrapper[5003]: I0104 11:48:55.125433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.125459 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.125477 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.146749 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.163641 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.184771 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.198958 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.213585 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.226497 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.229076 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.229118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.229129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.229146 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.229159 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.245290 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\"
:\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.258845 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.275172 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.288898 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.305569 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: 
Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.326501 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/
kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"
image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"message\\\":\\\"l\\\\nI0104 11:48:40.118355 6476 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:40.118401 6476 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:40.118422 6476 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:40.118442 6476 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:40.118844 6476 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:40.119444 6476 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:40.120697 6476 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:40.120778 6476 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:40.120792 6476 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:40.120816 6476 factory.go:656] Stopping watch factory\\\\nI0104 11:48:40.120839 6476 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:40.120899 6476 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:40.120915 6476 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 
11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.331182 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.331206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.331216 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.331231 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.331245 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.344110 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.356616 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 
11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.369446 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.397946 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.421770 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:55Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.435771 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.435859 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.435878 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.435910 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.435930 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.540078 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.540157 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.540182 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.540217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.540242 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.643598 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.643668 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.643685 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.643710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.643728 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.720583 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.720834 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.720904 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.721181 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.721282 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:49:27.721250751 +0000 UTC m=+83.194280632 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.721714 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:49:27.721689942 +0000 UTC m=+83.194719823 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.721906 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.721993 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:49:27.72197494 +0000 UTC m=+83.195004831 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.747272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.747315 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.747332 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.747354 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.747373 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.806349 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.806636 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.807008 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.807229 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.807437 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.807675 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.822186 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.822285 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.822460 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.822484 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.822502 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.822561 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:49:27.822541926 +0000 UTC m=+83.295571777 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.822723 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.822770 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.822792 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:55 crc kubenswrapper[5003]: E0104 11:48:55.822888 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:49:27.822852004 +0000 UTC m=+83.295882025 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.850301 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.850356 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.850376 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.850404 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.850424 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.953528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.953603 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.953621 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.953650 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:55 crc kubenswrapper[5003]: I0104 11:48:55.953673 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:55Z","lastTransitionTime":"2026-01-04T11:48:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.057789 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.057895 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.057915 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.057958 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.057998 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.161560 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.161618 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.161636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.161665 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.161686 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.250410 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/2.log" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.252659 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/1.log" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.257589 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4" exitCode=1 Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.257647 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4"} Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.257814 5003 scope.go:117] "RemoveContainer" containerID="391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.259150 5003 scope.go:117] "RemoveContainer" containerID="39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4" Jan 04 11:48:56 crc kubenswrapper[5003]: E0104 11:48:56.259478 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.264872 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.264950 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.264968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.265101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.265122 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.282524 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.299487 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.320945 5003 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.341430 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.367380 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.368914 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.368973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.368994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.369231 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.369252 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.399676 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"message\\\":\\\"l\\\\nI0104 11:48:40.118355 6476 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:40.118401 6476 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:40.118422 6476 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:40.118442 6476 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:40.118844 6476 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:40.119444 6476 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:40.120697 6476 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:40.120778 6476 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:40.120792 6476 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:40.120816 6476 factory.go:656] Stopping watch factory\\\\nI0104 11:48:40.120839 6476 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:40.120899 6476 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:40.120915 6476 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 
11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"Bs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941192 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-dns-operator for network=default : 864.963µs\\\\nI0104 11:48:54.941235 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941255 6669 factory.go:656] Stopping watch factory\\\\nI0104 11:48:54.941272 6669 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:54.941256 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-console-operator for network=default : 992.936µs\\\\nI0104 11:48:54.941304 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI0104 11:48:54.941318 6669 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 854.682µs\\\\nI0104 11:48:54.941321 6669 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0104 11:48:54.941410 6669 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.415754 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.433077 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\
\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.458664 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 
11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.472906 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.472945 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.472957 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.472976 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.472987 5003 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.482150 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.504045 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.529060 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.556879 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.577151 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.578610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.578657 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.578675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 
11:48:56.578706 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.578840 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.597471 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.617819 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.637760 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.653395 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:48:56Z is after 2025-08-24T17:21:41Z" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.682814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.682891 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.682913 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.682943 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.682966 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.787414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.787480 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.787501 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.787532 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.787550 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.806181 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:56 crc kubenswrapper[5003]: E0104 11:48:56.806465 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.891586 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.891664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.891685 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.891717 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.891739 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.995009 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.995133 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.995154 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.995183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:56 crc kubenswrapper[5003]: I0104 11:48:56.995203 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:56Z","lastTransitionTime":"2026-01-04T11:48:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.098345 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.098420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.098440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.098468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.098488 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.202188 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.202272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.202292 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.202320 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.202337 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.266245 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/2.log" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.305128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.305209 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.305221 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.305242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.305279 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.408760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.408822 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.408841 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.408867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.408886 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.512419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.512498 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.512516 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.512549 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.512592 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.617008 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.617107 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.617126 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.617149 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.617166 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.722268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.722333 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.722349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.722371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.722389 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.806946 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.807002 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.807092 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:48:57 crc kubenswrapper[5003]: E0104 11:48:57.807178 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:48:57 crc kubenswrapper[5003]: E0104 11:48:57.807313 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:48:57 crc kubenswrapper[5003]: E0104 11:48:57.807440 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.825280 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.825339 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.825357 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.825384 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.825403 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.932084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.932156 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.932175 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.932202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:57 crc kubenswrapper[5003]: I0104 11:48:57.932220 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:57Z","lastTransitionTime":"2026-01-04T11:48:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.035081 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.035122 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.035131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.035146 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.035156 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.138395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.138460 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.138480 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.138518 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.138543 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.242293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.242340 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.242348 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.242363 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.242374 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.346004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.346102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.346120 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.346147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.346165 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.397740 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.397797 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.397815 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.397837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.397855 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[5003]: E0104 11:48:58.418532 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:58Z is after 
2025-08-24T17:21:41Z"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.424303 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.424536 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.424699 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.424728 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.424746 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:58 crc kubenswrapper[5003]: E0104 11:48:58.452062 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:58Z is after 
2025-08-24T17:21:41Z"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.458087 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.458153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.458174 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.458206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.458231 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:58 crc kubenswrapper[5003]: E0104 11:48:58.480150 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:58Z is after 
2025-08-24T17:21:41Z"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.485373 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.485433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.485459 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.485485 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.485503 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:48:58 crc kubenswrapper[5003]: E0104 11:48:58.508794 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:58Z is after 
2025-08-24T17:21:41Z" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.514837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.514878 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.514889 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.514907 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.514919 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[5003]: E0104 11:48:58.535204 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:48:58Z is after 
2025-08-24T17:21:41Z" Jan 04 11:48:58 crc kubenswrapper[5003]: E0104 11:48:58.535638 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.540703 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.540759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.540780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.540805 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.540822 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[5003]: E0104 11:48:58.555496 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:58 crc kubenswrapper[5003]: E0104 11:48:58.555613 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs podName:cd6de6ec-2a7c-4842-9d8a-ba4032acb50e nodeName:}" failed. No retries permitted until 2026-01-04 11:49:14.555585923 +0000 UTC m=+70.028615804 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs") pod "network-metrics-daemon-n2zwh" (UID: "cd6de6ec-2a7c-4842-9d8a-ba4032acb50e") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.555256 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.645240 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.645279 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.645292 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.645309 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.645323 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.747984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.748044 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.748057 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.748075 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.748085 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.806777 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:48:58 crc kubenswrapper[5003]: E0104 11:48:58.806891 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.850908 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.850967 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.850984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.851007 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.851051 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.953392 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.953428 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.953438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.953451 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:58 crc kubenswrapper[5003]: I0104 11:48:58.953463 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:58Z","lastTransitionTime":"2026-01-04T11:48:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.056322 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.056397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.056418 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.056449 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.056470 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.159123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.159174 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.159185 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.159204 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.159218 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.261730 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.261789 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.261806 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.261829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.261845 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.364339 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.364596 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.364672 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.364743 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.364805 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.467876 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.467932 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.467948 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.467971 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.467988 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.571934 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.572005 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.572077 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.572111 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.572136 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.675224 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.675274 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.675289 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.675308 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.675319 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.778272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.779000 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.779060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.779099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.779121 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.806153 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:48:59 crc kubenswrapper[5003]: E0104 11:48:59.806358 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.806403 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.806481 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:48:59 crc kubenswrapper[5003]: E0104 11:48:59.806558 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:48:59 crc kubenswrapper[5003]: E0104 11:48:59.806679 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.882121 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.882203 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.882223 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.882251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.882271 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.986348 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.986426 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.986444 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.986475 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:48:59 crc kubenswrapper[5003]: I0104 11:48:59.986497 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:48:59Z","lastTransitionTime":"2026-01-04T11:48:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.089730 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.089775 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.089784 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.089800 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.089810 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.193981 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.194077 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.194099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.194124 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.194142 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.296424 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.296466 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.296476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.296489 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.296497 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.400451 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.400547 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.400579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.400629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.400658 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.504776 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.504831 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.504843 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.504862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.504877 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.607909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.607978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.607995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.608045 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.608076 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.711712 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.712398 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.712440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.712469 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.712493 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.806083 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:00 crc kubenswrapper[5003]: E0104 11:49:00.806388 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.814977 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.815044 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.815057 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.815075 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.815087 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.918166 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.918295 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.918321 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.918353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:00 crc kubenswrapper[5003]: I0104 11:49:00.918376 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:00Z","lastTransitionTime":"2026-01-04T11:49:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.021923 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.021991 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.022046 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.022107 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.022129 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.124958 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.125004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.125030 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.125047 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.125059 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.228373 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.228416 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.228427 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.228443 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.228454 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.332265 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.332334 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.332352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.332381 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.332402 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.435515 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.435574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.435593 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.435620 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.435640 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.539414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.539469 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.539484 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.539502 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.539514 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.641705 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.641749 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.641760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.641775 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.641787 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.745456 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.745527 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.745551 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.746138 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.746187 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:01Z","lastTransitionTime":"2026-01-04T11:49:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.806756 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.806834 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:01 crc kubenswrapper[5003]: E0104 11:49:01.806921 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:01 crc kubenswrapper[5003]: I0104 11:49:01.806834 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:01 crc kubenswrapper[5003]: E0104 11:49:01.807130 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:01 crc kubenswrapper[5003]: E0104 11:49:01.807304 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
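Every one of these sync errors points at the same root cause: nothing under /etc/kubernetes/cni/net.d/. A hedged sketch of the equivalent check one might run on the node; the directory comes from the log, while the *.conf/*.conflist/*.json patterns are an assumption based on common CNI loader conventions:

import glob
import os

# Directory named in the kubelet error messages above.
CNI_CONF_DIR = "/etc/kubernetes/cni/net.d"

# Assumption: CNI config loaders conventionally pick up .conf, .conflist
# and .json files; the log itself does not name the extensions.
candidates = []
for pattern in ("*.conf", "*.conflist", "*.json"):
    candidates.extend(glob.glob(os.path.join(CNI_CONF_DIR, pattern)))

if not candidates:
    print(f"no CNI configuration file in {CNI_CONF_DIR} -- "
          "matches the NetworkPluginNotReady condition in the log")
else:
    for path in sorted(candidates):
        print("found CNI config:", path)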
[Heartbeat block repeats at 11:49:01.849, .953, 11:49:02.058, .162, .265, .368, .472, .575, .678 and .782.]
Jan 04 11:49:02 crc kubenswrapper[5003]: I0104 11:49:02.806301 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:49:02 crc kubenswrapper[5003]: E0104 11:49:02.806599 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
[Heartbeat block repeats at 11:49:02.886 and .990.]
[Heartbeat block repeats at 11:49:03.093, .196, .300, .404, .510 and .616.]
[Heartbeat block repeats at 11:49:03.719.]
Jan 04 11:49:03 crc kubenswrapper[5003]: I0104 11:49:03.806635 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:03 crc kubenswrapper[5003]: I0104 11:49:03.806674 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:03 crc kubenswrapper[5003]: I0104 11:49:03.806741 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:03 crc kubenswrapper[5003]: E0104 11:49:03.806902 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:03 crc kubenswrapper[5003]: E0104 11:49:03.807002 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:03 crc kubenswrapper[5003]: E0104 11:49:03.807199 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[Heartbeat block repeats at 11:49:03.823 and .925.]
[Heartbeat block repeats at 11:49:04.028, .133, .236, .339, .444 and .556.]
[Heartbeat block repeats at 11:49:04.660 and .764.]
Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.807045 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:49:04 crc kubenswrapper[5003]: E0104 11:49:04.807409 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.824991 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:04Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.838695 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:04Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.855997 5003 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:04Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.867583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.867662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.867678 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.867700 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.867714 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:04Z","lastTransitionTime":"2026-01-04T11:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
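The patch bodies the status manager is trying to send (shown in full for machine-config-daemon-rcgwp above) are strategic-merge patches; the $setElementOrder/conditions directive pins the ordering of the conditions list when the server merges the patch. A small illustrative sketch of that shape; the UID and timestamps here are invented, and the content-type note is an assumption about how such patches are submitted:

import json

# Illustrative pod status patch in the same shape as the ones above;
# the uid, timestamps and statuses are made up for the example.
patch = {
    "metadata": {"uid": "00000000-0000-0000-0000-000000000000"},  # hypothetical
    "status": {
        # Directive: keep the conditions list ordered by these types
        # when the patch is merged server-side.
        "$setElementOrder/conditions": [
            {"type": "PodReadyToStartContainers"},
            {"type": "Initialized"},
            {"type": "Ready"},
            {"type": "ContainersReady"},
            {"type": "PodScheduled"},
        ],
        "conditions": [
            {"type": "Ready", "status": "True",
             "lastTransitionTime": "2026-01-04T11:48:30Z"},
        ],
    },
}

# Assumption: such bodies are sent as a PATCH request with content type
# application/strategic-merge-patch+json.
print(json.dumps(patch, indent=2))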
Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.873960 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status [payload elided] for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:04Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.894133 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status [payload elided] for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:04Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.925737 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status [payload elided; entry truncated in source]
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8
e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://391ed134fdda3868329edaff5ae745adf002bf505aaa0765c3da749625f0311a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:40Z\\\",\\\"message\\\":\\\"l\\\\nI0104 11:48:40.118355 6476 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0104 11:48:40.118401 6476 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0104 11:48:40.118422 6476 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0104 11:48:40.118442 6476 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0104 11:48:40.118844 6476 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0104 11:48:40.119444 6476 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI0104 11:48:40.120697 6476 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0104 11:48:40.120778 6476 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0104 11:48:40.120792 6476 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0104 11:48:40.120816 6476 factory.go:656] Stopping watch factory\\\\nI0104 11:48:40.120839 6476 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:40.120899 6476 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0104 11:48:40.120915 6476 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0104 
11\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:39Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"Bs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941192 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-dns-operator for network=default : 864.963µs\\\\nI0104 11:48:54.941235 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941255 6669 factory.go:656] Stopping watch factory\\\\nI0104 11:48:54.941272 6669 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:54.941256 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-console-operator for network=default : 992.936µs\\\\nI0104 11:48:54.941304 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI0104 11:48:54.941318 6669 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 854.682µs\\\\nI0104 11:48:54.941321 6669 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0104 11:48:54.941410 6669 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:04Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.936519 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:04Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.952324 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\
\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:04Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.967389 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 
11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:04Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.973725 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.973891 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.973912 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.973940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.973985 5003 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:04Z","lastTransitionTime":"2026-01-04T11:49:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:04 crc kubenswrapper[5003]: I0104 11:49:04.987595 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:04Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.004950 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:05Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.026105 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:05Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.062412 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:05Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.076764 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.076823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.076842 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.076870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.076891 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.081295 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:05Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.098916 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:05Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.118753 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:05Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.134321 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:05Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.147299 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:49:05Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.179979 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.180133 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.180153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.180210 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.180232 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.282877 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.282988 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.282997 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.283065 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.283086 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.386968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.387073 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.387093 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.387124 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.387149 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.490306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.490390 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.490411 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.490442 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.490461 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.594139 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.594180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.594191 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.594210 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.594221 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.697478 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.697527 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.697544 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.697567 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.697586 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.801044 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.801112 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.801131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.801155 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.801180 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.806451 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:05 crc kubenswrapper[5003]: E0104 11:49:05.806669 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.806956 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:05 crc kubenswrapper[5003]: E0104 11:49:05.807195 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.806957 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:05 crc kubenswrapper[5003]: E0104 11:49:05.807348 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.903569 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.903658 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.903678 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.903709 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:05 crc kubenswrapper[5003]: I0104 11:49:05.903731 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:05Z","lastTransitionTime":"2026-01-04T11:49:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.006348 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.006395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.006405 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.006423 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.006434 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.109093 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.109173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.109193 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.109225 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.109244 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.212582 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.212635 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.212651 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.212675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.212694 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.315604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.315664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.315674 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.315688 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.315716 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.419375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.419414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.419424 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.419444 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.419456 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.522465 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.522561 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.522589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.522622 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.522647 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.627059 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.627131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.627148 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.627174 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.627192 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.730684 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.730752 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.730772 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.730804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.730830 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.806098 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:06 crc kubenswrapper[5003]: E0104 11:49:06.806299 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.833640 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.833696 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.833732 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.833762 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.833787 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.936770 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.936837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.936860 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.936892 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:06 crc kubenswrapper[5003]: I0104 11:49:06.936911 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:06Z","lastTransitionTime":"2026-01-04T11:49:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.040582 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.040664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.040690 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.040722 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.040746 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.145934 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.146072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.146099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.146130 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.146155 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.249136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.249205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.249221 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.249247 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.249266 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.352495 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.352564 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.352580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.352604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.352622 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.456123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.456185 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.456202 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.456227 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.456246 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.559310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.559377 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.559395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.559419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.559439 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.661993 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.662102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.662129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.662159 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.662180 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.765419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.765472 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.765489 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.765513 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.765531 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.805901 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.806078 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.806215 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:07 crc kubenswrapper[5003]: E0104 11:49:07.806355 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:07 crc kubenswrapper[5003]: E0104 11:49:07.806673 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:07 crc kubenswrapper[5003]: E0104 11:49:07.806562 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.868639 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.868700 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.868719 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.868743 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.868760 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.971488 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.971542 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.971558 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.971581 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:07 crc kubenswrapper[5003]: I0104 11:49:07.971596 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:07Z","lastTransitionTime":"2026-01-04T11:49:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.074355 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.074413 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.074433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.074460 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.074478 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.177769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.177814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.177828 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.177845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.177856 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.280153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.280211 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.280225 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.280244 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.280257 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.383857 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.384293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.384468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.384617 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.384765 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.488051 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.488100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.488113 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.488131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.488144 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.590894 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.590944 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.590952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.590967 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.590976 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.596377 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.596430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.596444 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.596461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.596474 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: E0104 11:49:08.612191 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.615359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.615388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.615399 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.615415 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.615426 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: E0104 11:49:08.629728 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.632994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.633062 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.633082 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.633102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.633114 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: E0104 11:49:08.653205 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.656645 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.656691 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.656705 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.656725 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.656740 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: E0104 11:49:08.671175 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.678932 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.678982 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.679006 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.679054 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.679077 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: E0104 11:49:08.696864 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: E0104 11:49:08.697369 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.699930 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.700134 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.700236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.700340 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.700427 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.803349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.803753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.803919 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.803999 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.804130 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.805706 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:08 crc kubenswrapper[5003]: E0104 11:49:08.805908 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.806664 5003 scope.go:117] "RemoveContainer" containerID="39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4" Jan 04 11:49:08 crc kubenswrapper[5003]: E0104 11:49:08.806902 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.835703 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mou
ntPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-d
ir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.850479 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.863302 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.881879 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.899539 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.906900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.906956 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.906973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.906999 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.907032 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:08Z","lastTransitionTime":"2026-01-04T11:49:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.916477 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.931774 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.945938 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.960543 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.974759 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:08 crc kubenswrapper[5003]: I0104 11:49:08.990454 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:08Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.002428 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:09Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.010071 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.010117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 
11:49:09.010131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.010152 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.010168 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.019633 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\
"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:09Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.034547 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:09Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.055707 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:09Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.088345 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4d
b94250610454a284dfab69c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"Bs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941192 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-dns-operator for network=default : 864.963µs\\\\nI0104 11:48:54.941235 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941255 6669 factory.go:656] Stopping watch factory\\\\nI0104 11:48:54.941272 6669 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:54.941256 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-console-operator for network=default : 992.936µs\\\\nI0104 11:48:54.941304 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI0104 11:48:54.941318 6669 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 854.682µs\\\\nI0104 11:48:54.941321 6669 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0104 11:48:54.941410 6669 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:09Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.105403 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:09Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.112371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.112585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.112663 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.112766 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.112846 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.118451 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:09Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.215430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.215497 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.215506 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.215539 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.215554 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.317384 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.317438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.317447 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.317461 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.317470 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.419769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.419834 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.419852 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.419876 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.419891 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.522563 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.522676 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.522732 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.522816 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.522890 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.625475 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.625837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.625947 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.626090 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.626216 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.728623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.728684 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.728707 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.728736 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.728755 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.805785 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:09 crc kubenswrapper[5003]: E0104 11:49:09.806162 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.805851 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:09 crc kubenswrapper[5003]: E0104 11:49:09.806396 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.805796 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:09 crc kubenswrapper[5003]: E0104 11:49:09.806621 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.832710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.832760 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.832789 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.832815 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.832833 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.935712 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.935756 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.935791 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.935810 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:09 crc kubenswrapper[5003]: I0104 11:49:09.935822 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:09Z","lastTransitionTime":"2026-01-04T11:49:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.039318 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.039372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.039389 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.039409 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.039426 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.141698 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.141742 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.141754 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.141772 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.141783 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.245072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.245135 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.245149 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.245164 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.245176 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.347453 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.347483 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.347492 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.347503 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.347512 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.450561 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.450614 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.450627 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.450646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.450660 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.553256 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.553307 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.553319 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.553335 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.553348 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.656312 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.656350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.656359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.656375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.656386 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.759109 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.759160 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.759171 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.759189 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.759201 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.806476 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:10 crc kubenswrapper[5003]: E0104 11:49:10.806644 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.862548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.862598 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.862609 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.862628 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.862637 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.965986 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.966125 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.966151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.966183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:10 crc kubenswrapper[5003]: I0104 11:49:10.966205 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:10Z","lastTransitionTime":"2026-01-04T11:49:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.068617 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.069602 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.069657 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.069692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.069722 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.173946 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.174056 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.174079 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.174109 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.174135 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.277271 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.277395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.277419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.277450 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.277473 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.381369 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.381438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.381463 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.381497 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.381524 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.484618 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.484662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.484672 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.484691 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.484702 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.588109 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.588531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.588670 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.588837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.589032 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.693098 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.693151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.693170 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.693197 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.693212 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.795797 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.795885 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.795916 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.795943 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.795961 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.806440 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:11 crc kubenswrapper[5003]: E0104 11:49:11.806542 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.806681 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.806737 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:11 crc kubenswrapper[5003]: E0104 11:49:11.806938 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:11 crc kubenswrapper[5003]: E0104 11:49:11.807079 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.898465 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.898763 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.898840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.898928 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:11 crc kubenswrapper[5003]: I0104 11:49:11.899001 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:11Z","lastTransitionTime":"2026-01-04T11:49:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.002399 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.002589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.002608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.002632 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.002647 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.104721 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.104782 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.104797 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.104817 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.104830 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.208067 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.208108 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.208117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.208131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.208141 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.311252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.311288 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.311302 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.311317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.311326 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.416848 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.417492 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.417510 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.417526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.417537 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.520078 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.520155 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.520181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.520212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.520232 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.623366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.623406 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.623414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.623428 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.623437 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.726096 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.726674 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.726950 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.727162 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.727373 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.806407 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:12 crc kubenswrapper[5003]: E0104 11:49:12.806593 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.830565 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.830633 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.830643 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.830655 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.830665 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.933400 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.933884 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.934141 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.934335 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:12 crc kubenswrapper[5003]: I0104 11:49:12.934484 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:12Z","lastTransitionTime":"2026-01-04T11:49:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.037557 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.038595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.038739 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.038936 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.039232 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.142964 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.143293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.143392 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.143477 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.143562 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.246652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.246704 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.246718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.246735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.246746 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.349430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.349468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.349477 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.349496 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.349507 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.452229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.452291 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.452302 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.452319 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.452335 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.555711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.555770 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.555787 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.555814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.555905 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.658353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.658404 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.658416 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.658432 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.658444 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.760621 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.760661 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.760671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.760686 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.760696 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.806370 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.806471 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.806506 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:13 crc kubenswrapper[5003]: E0104 11:49:13.806628 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:13 crc kubenswrapper[5003]: E0104 11:49:13.806701 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:13 crc kubenswrapper[5003]: E0104 11:49:13.806790 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.862994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.863053 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.863062 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.863075 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.863085 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.965801 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.965840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.965851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.965867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:13 crc kubenswrapper[5003]: I0104 11:49:13.965879 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:13Z","lastTransitionTime":"2026-01-04T11:49:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.068535 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.068578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.068589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.068609 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.068622 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.171289 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.171349 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.171371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.171397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.171415 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.275451 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.275492 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.275502 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.275518 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.275527 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.378921 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.378963 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.378996 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.379027 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.379040 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.481751 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.481784 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.481793 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.481806 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.481814 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.584812 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.584869 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.584885 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.584909 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.584928 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.634065 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:14 crc kubenswrapper[5003]: E0104 11:49:14.634329 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:49:14 crc kubenswrapper[5003]: E0104 11:49:14.634454 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs podName:cd6de6ec-2a7c-4842-9d8a-ba4032acb50e nodeName:}" failed. No retries permitted until 2026-01-04 11:49:46.634421445 +0000 UTC m=+102.107451326 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs") pod "network-metrics-daemon-n2zwh" (UID: "cd6de6ec-2a7c-4842-9d8a-ba4032acb50e") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.687067 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.687104 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.687115 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.687133 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.687146 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.790308 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.790702 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.790714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.790733 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.790747 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.806276 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:14 crc kubenswrapper[5003]: E0104 11:49:14.806420 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.824976 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.847239 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.870460 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.891276 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.893123 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.893159 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.893167 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.893190 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.893204 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.909279 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.926708 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.940501 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.952590 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.965178 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.976501 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.989074 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.996304 5003 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.996343 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.996354 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.996376 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:14 crc kubenswrapper[5003]: I0104 11:49:14.996390 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:14Z","lastTransitionTime":"2026-01-04T11:49:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.000338 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:14Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.012306 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9
a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.025276 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mount
Path\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.044568 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4d
b94250610454a284dfab69c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"Bs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941192 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-dns-operator for network=default : 864.963µs\\\\nI0104 11:48:54.941235 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941255 6669 factory.go:656] Stopping watch factory\\\\nI0104 11:48:54.941272 6669 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:54.941256 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-console-operator for network=default : 992.936µs\\\\nI0104 11:48:54.941304 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI0104 11:48:54.941318 6669 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 854.682µs\\\\nI0104 11:48:54.941321 6669 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0104 11:48:54.941410 6669 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.056389 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.067777 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.080887 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod
-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.099316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.099434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.099510 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.099580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 
11:49:15.099661 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:15Z","lastTransitionTime":"2026-01-04T11:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.203362 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.203480 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.203512 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.203544 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.203566 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:15Z","lastTransitionTime":"2026-01-04T11:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.306863 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.306935 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.306958 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.306987 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.307006 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:15Z","lastTransitionTime":"2026-01-04T11:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.339528 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-np5qh_6e5d41d8-142e-4ca3-a20a-f6d338aaddf2/kube-multus/0.log"
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.339768 5003 generic.go:334] "Generic (PLEG): container finished" podID="6e5d41d8-142e-4ca3-a20a-f6d338aaddf2" containerID="32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1" exitCode=1
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.339820 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-np5qh" event={"ID":"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2","Type":"ContainerDied","Data":"32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1"}
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.340354 5003 scope.go:117] "RemoveContainer" containerID="32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1"
Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.359301 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.379109 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:
41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.399542 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7
462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.411030 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.411069 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.411081 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.411098 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.411108 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:15Z","lastTransitionTime":"2026-01-04T11:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.419348 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.434309 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:14Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:29+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd\\\\n2026-01-04T11:48:29+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd to /host/opt/cni/bin/\\\\n2026-01-04T11:48:29Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:29Z [verbose] Readiness Indicator file check\\\\n2026-01-04T11:49:14Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.455132 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cer
t\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"Bs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941192 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-dns-operator for network=default : 864.963µs\\\\nI0104 11:48:54.941235 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941255 6669 factory.go:656] Stopping watch factory\\\\nI0104 11:48:54.941272 6669 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:54.941256 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-console-operator for network=default : 992.936µs\\\\nI0104 11:48:54.941304 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI0104 11:48:54.941318 6669 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 854.682µs\\\\nI0104 11:48:54.941321 6669 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0104 11:48:54.941410 
6669 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.486155 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.504150 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.513934 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.514005 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.514051 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.514085 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.514097 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:15Z","lastTransitionTime":"2026-01-04T11:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.518275 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.537001 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.550073 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.567935 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.588266 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\
\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.612344 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.616497 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.616530 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.616541 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.616557 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.616569 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:15Z","lastTransitionTime":"2026-01-04T11:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.635452 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.649755 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.669177 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.684922 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:15Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.719248 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.719302 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 
11:49:15.719322 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.719346 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.719363 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:15Z","lastTransitionTime":"2026-01-04T11:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.806166 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:15 crc kubenswrapper[5003]: E0104 11:49:15.806307 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.806390 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.806490 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:15 crc kubenswrapper[5003]: E0104 11:49:15.806510 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:15 crc kubenswrapper[5003]: E0104 11:49:15.806550 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.821866 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.821913 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.822092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.822112 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.822174 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.822193 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:15Z","lastTransitionTime":"2026-01-04T11:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.925630 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.925710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.925732 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.925761 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:15 crc kubenswrapper[5003]: I0104 11:49:15.925783 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:15Z","lastTransitionTime":"2026-01-04T11:49:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.028409 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.028474 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.028486 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.028499 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.028508 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:16Z","lastTransitionTime":"2026-01-04T11:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.131500 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.132082 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.132183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.132264 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.132329 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:16Z","lastTransitionTime":"2026-01-04T11:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.235382 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.235431 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.235445 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.235464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.235480 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:16Z","lastTransitionTime":"2026-01-04T11:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.338291 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.338705 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.338800 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.338921 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.339055 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:16Z","lastTransitionTime":"2026-01-04T11:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.345740 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-np5qh_6e5d41d8-142e-4ca3-a20a-f6d338aaddf2/kube-multus/0.log" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.345858 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-np5qh" event={"ID":"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2","Type":"ContainerStarted","Data":"0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7"} Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.361499 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.381603 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.401857 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:14Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:29+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd\\\\n2026-01-04T11:48:29+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd to /host/opt/cni/bin/\\\\n2026-01-04T11:48:29Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:29Z [verbose] Readiness 
Indicator file check\\\\n2026-01-04T11:49:14Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.431473 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"Bs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941192 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-dns-operator for network=default : 864.963µs\\\\nI0104 11:48:54.941235 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941255 6669 factory.go:656] Stopping watch factory\\\\nI0104 11:48:54.941272 6669 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:54.941256 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-console-operator for network=default : 992.936µs\\\\nI0104 11:48:54.941304 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI0104 11:48:54.941318 6669 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 854.682µs\\\\nI0104 11:48:54.941321 6669 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0104 11:48:54.941410 6669 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.441466 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.441507 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.441516 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.441551 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.441563 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:16Z","lastTransitionTime":"2026-01-04T11:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.447050 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.460268 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 
11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.488922 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.506192 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.522241 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.545607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.545658 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.545671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.545690 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.545702 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:16Z","lastTransitionTime":"2026-01-04T11:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.547151 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.563647 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0f60dfb-d71c-415e-840b-4e5cef6037b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c0d47ed48809e56449b68ddfc86eb427381be87a8df60f581867c82d8c44890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.581302 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.594361 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.607719 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.622709 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.636217 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.648317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.648355 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.648368 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.648385 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.648396 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:16Z","lastTransitionTime":"2026-01-04T11:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.651698 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc 
kubenswrapper[5003]: I0104 11:49:16.663560 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.678258 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:16Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.750988 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:16 
crc kubenswrapper[5003]: I0104 11:49:16.751052 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.751064 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.751083 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.751096 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:16Z","lastTransitionTime":"2026-01-04T11:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.805802 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:16 crc kubenswrapper[5003]: E0104 11:49:16.806292 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.853606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.853652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.853668 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.853691 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.853709 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:16Z","lastTransitionTime":"2026-01-04T11:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.956675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.956719 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.956728 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.956743 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:16 crc kubenswrapper[5003]: I0104 11:49:16.956753 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:16Z","lastTransitionTime":"2026-01-04T11:49:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.060118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.060166 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.060176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.060195 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.060209 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:17Z","lastTransitionTime":"2026-01-04T11:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.163035 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.163090 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.163104 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.163121 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.163132 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:17Z","lastTransitionTime":"2026-01-04T11:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.266125 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.266375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.266571 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.266700 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.266765 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:17Z","lastTransitionTime":"2026-01-04T11:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.369817 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.369899 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.369914 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.369929 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.369941 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:17Z","lastTransitionTime":"2026-01-04T11:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.472087 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.472157 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.472180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.472208 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.472228 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:17Z","lastTransitionTime":"2026-01-04T11:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.574997 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.575373 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.575468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.575581 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.575686 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:17Z","lastTransitionTime":"2026-01-04T11:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.680336 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.680566 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.680654 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.680715 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.680775 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:17Z","lastTransitionTime":"2026-01-04T11:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.783834 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.784606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.784671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.784741 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.784807 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:17Z","lastTransitionTime":"2026-01-04T11:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.806318 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:17 crc kubenswrapper[5003]: E0104 11:49:17.806427 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.806478 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.806508 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:17 crc kubenswrapper[5003]: E0104 11:49:17.806554 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:17 crc kubenswrapper[5003]: E0104 11:49:17.806724 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.889319 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.889368 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.889377 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.889391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.889401 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:17Z","lastTransitionTime":"2026-01-04T11:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.992740 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.992804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.992813 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.992827 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:17 crc kubenswrapper[5003]: I0104 11:49:17.992836 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:17Z","lastTransitionTime":"2026-01-04T11:49:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.095816 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.096266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.096409 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.096560 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.096681 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.198937 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.198978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.198989 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.199006 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.199048 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.301562 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.301620 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.301638 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.301662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.301680 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.404320 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.404370 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.404382 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.404401 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.404418 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.507337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.507380 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.507392 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.507408 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.507419 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.610577 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.610656 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.610674 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.610700 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.610717 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.713841 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.713885 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.713894 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.713910 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.713920 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.806124 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:18 crc kubenswrapper[5003]: E0104 11:49:18.806358 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.818945 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.819001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.819062 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.819089 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.819107 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.906065 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.906144 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.906159 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.906180 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.906197 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: E0104 11:49:18.922291 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:18Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.928969 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.929042 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.929057 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.929073 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.929087 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: E0104 11:49:18.949323 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:18Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.955300 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.955344 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.955356 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.955373 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.955386 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: E0104 11:49:18.969887 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:18Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.975183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.975251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.975268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.975724 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:18 crc kubenswrapper[5003]: I0104 11:49:18.975802 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:18Z","lastTransitionTime":"2026-01-04T11:49:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:18 crc kubenswrapper[5003]: E0104 11:49:18.998319 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:18Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.003449 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.003497 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.003513 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.003575 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.003594 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[5003]: E0104 11:49:19.022252 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:19Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:19 crc kubenswrapper[5003]: E0104 11:49:19.022406 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.024253 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.024294 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.024309 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.024329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.024345 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.127838 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.127923 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.127943 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.127979 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.128005 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.230607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.230684 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.230712 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.230807 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.230835 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.334965 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.335143 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.335169 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.335200 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.335223 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.438803 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.438856 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.438868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.438887 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.438902 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.542043 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.542078 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.542088 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.542131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.542149 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.645736 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.645773 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.645786 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.645801 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.645815 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.749844 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.750224 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.750462 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.750670 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.750900 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.806516 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.806908 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:19 crc kubenswrapper[5003]: E0104 11:49:19.807335 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.807488 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:19 crc kubenswrapper[5003]: E0104 11:49:19.807615 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:19 crc kubenswrapper[5003]: E0104 11:49:19.807827 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.854900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.854945 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.854960 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.854984 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.855001 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.958711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.959085 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.959239 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.959414 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:19 crc kubenswrapper[5003]: I0104 11:49:19.959552 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:19Z","lastTransitionTime":"2026-01-04T11:49:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.064798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.064899 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.064920 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.064952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.064972 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.168480 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.168547 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.168566 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.168595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.168615 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.273004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.273141 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.273161 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.273186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.273204 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.376611 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.376700 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.376722 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.376762 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.376786 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.480432 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.480504 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.480519 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.480546 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.480561 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.584632 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.584680 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.584694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.584721 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.584737 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.688691 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.688738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.688751 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.688771 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.688786 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.792604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.792647 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.792656 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.792674 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.792686 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.805953 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:20 crc kubenswrapper[5003]: E0104 11:49:20.806174 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.894718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.894765 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.894777 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.894794 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.894806 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.998336 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.998397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.998415 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.998438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:20 crc kubenswrapper[5003]: I0104 11:49:20.998455 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:20Z","lastTransitionTime":"2026-01-04T11:49:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.101599 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.101694 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.101714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.101746 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.101770 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.205178 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.205271 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.205288 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.205320 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.205341 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.308187 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.308258 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.308280 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.308307 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.308326 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.411511 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.411575 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.411585 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.411607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.411620 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.515425 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.515482 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.515500 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.515526 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.515544 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.619293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.619357 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.619372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.619396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.619417 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.723520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.723630 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.723656 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.723695 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.723724 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.805916 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.805963 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:21 crc kubenswrapper[5003]: E0104 11:49:21.806189 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.806258 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:21 crc kubenswrapper[5003]: E0104 11:49:21.806348 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:21 crc kubenswrapper[5003]: E0104 11:49:21.806499 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.807793 5003 scope.go:117] "RemoveContainer" containerID="39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.826977 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.827074 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.827144 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.827187 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.827229 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.931312 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.931791 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.931813 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.931843 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:21 crc kubenswrapper[5003]: I0104 11:49:21.931861 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:21Z","lastTransitionTime":"2026-01-04T11:49:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.034337 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.034401 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.034420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.034445 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.034467 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.137668 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.137715 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.137735 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.137757 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.137772 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.241090 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.241142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.241153 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.241171 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.241186 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.343607 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.343645 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.343655 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.343668 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.343679 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.374614 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/2.log" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.377851 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"} Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.378262 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.390501 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.404544 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:14Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:29+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd\\\\n2026-01-04T11:48:29+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd to /host/opt/cni/bin/\\\\n2026-01-04T11:48:29Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:29Z [verbose] Readiness 
Indicator file check\\\\n2026-01-04T11:49:14Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.422308 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"Bs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941192 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-dns-operator for network=default : 864.963µs\\\\nI0104 11:48:54.941235 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941255 6669 factory.go:656] Stopping watch factory\\\\nI0104 11:48:54.941272 6669 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:54.941256 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-console-operator for network=default : 992.936µs\\\\nI0104 11:48:54.941304 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI0104 11:48:54.941318 6669 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 854.682µs\\\\nI0104 11:48:54.941321 6669 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0104 11:48:54.941410 6669 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.432422 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.446044 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 
11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.446547 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.446756 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.446769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.446788 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.446802 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.474307 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 
11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.490410 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.501497 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.515357 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f
8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var
/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.536813 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"
2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.549424 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.549467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.549481 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.549503 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.549516 5003 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.551234 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.566323 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.580282 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.592356 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.603049 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.612862 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0f60dfb-d71c-415e-840b-4e5cef6037b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c0d47ed48809e56449b68ddfc86eb427381be87a8df60f581867c82d8c44890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 
11:49:22.624525 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.632667 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.643039 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.652382 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.652433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.652446 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.652464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.652480 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.755752 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.755807 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.755820 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.755838 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.755869 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.805787 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:22 crc kubenswrapper[5003]: E0104 11:49:22.805929 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.859469 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.859516 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.859530 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.859553 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.859568 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.962109 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.962174 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.962184 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.962198 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:22 crc kubenswrapper[5003]: I0104 11:49:22.962211 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:22Z","lastTransitionTime":"2026-01-04T11:49:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.065048 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.065088 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.065097 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.065114 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.065124 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.168331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.168443 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.168452 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.168471 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.168484 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.271536 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.271668 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.271686 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.271704 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.271718 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.374776 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.374820 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.374831 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.374851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.374862 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.382143 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/3.log" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.382778 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/2.log" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.385653 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010" exitCode=1 Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.385701 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"} Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.385743 5003 scope.go:117] "RemoveContainer" containerID="39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.386549 5003 scope.go:117] "RemoveContainer" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010" Jan 04 11:49:23 crc kubenswrapper[5003]: E0104 11:49:23.386732 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.450202 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6f6e150f09f0b817d6af18bb0ac35f23f1c7221
7392be252e6821d945f02010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39af6158a1af1262431fbc1be2e9b35acdd75e4db94250610454a284dfab69c4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"message\\\":\\\"Bs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-dns-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941192 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-dns-operator for network=default : 864.963µs\\\\nI0104 11:48:54.941235 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-console-operator/metrics\\\\\\\"}\\\\nI0104 11:48:54.941255 6669 factory.go:656] Stopping watch factory\\\\nI0104 11:48:54.941272 6669 ovnkube.go:599] Stopped ovnkube\\\\nI0104 11:48:54.941256 6669 services_controller.go:360] Finished syncing service metrics on namespace openshift-console-operator for network=default : 992.936µs\\\\nI0104 11:48:54.941304 6669 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}\\\\nI0104 11:48:54.941318 6669 services_controller.go:360] Finished syncing service image-registry on namespace openshift-image-registry for network=default : 854.682µs\\\\nI0104 11:48:54.941321 6669 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0104 11:48:54.941410 6669 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:23Z\\\",\\\"message\\\":\\\"for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z]\\\\nI0104 11:49:22.703323 7062 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"8b82f026-5975-4a1b-bb18-08d5d51147ec\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_clus\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.461449 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.472224 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 
11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.480186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.480235 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.480249 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.480267 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.480287 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.488510 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 
11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.501683 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.520825 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:14Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:29+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd\\\\n2026-01-04T11:48:29+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd to /host/opt/cni/bin/\\\\n2026-01-04T11:48:29Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:29Z [verbose] Readiness 
Indicator file check\\\\n2026-01-04T11:49:14Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.537033 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.563698 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.580580 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.583774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.583830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.583851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.583879 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.583899 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.592344 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.607242 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.624589 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.636988 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.648505 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0f60dfb-d71c-415e-840b-4e5cef6037b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c0d47ed48809e56449b68ddfc86eb427381be87a8df60f581867c82d8c44890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 
11:49:23.665528 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.677478 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.686940 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.687000 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.687051 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.687088 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.687106 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.693932 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.707197 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.720472 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:23Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.789796 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.789858 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 
11:49:23.789876 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.789901 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.789918 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.806176 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.806209 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.806250 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:23 crc kubenswrapper[5003]: E0104 11:49:23.806367 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:23 crc kubenswrapper[5003]: E0104 11:49:23.806681 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:23 crc kubenswrapper[5003]: E0104 11:49:23.806568 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.892434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.892487 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.892500 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.892515 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.892528 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.995219 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.995316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.995335 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.995360 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:23 crc kubenswrapper[5003]: I0104 11:49:23.995376 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:23Z","lastTransitionTime":"2026-01-04T11:49:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.098075 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.098133 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.098150 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.098178 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.098194 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.201095 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.201129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.201140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.201156 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.201166 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.304853 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.304901 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.304912 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.304929 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.304943 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.391953 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/3.log" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.397804 5003 scope.go:117] "RemoveContainer" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010" Jan 04 11:49:24 crc kubenswrapper[5003]: E0104 11:49:24.398361 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.407475 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.407534 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.407552 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.407583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.407606 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.417535 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.455640 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e
9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.478170 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.500238 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.516463 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.516577 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.516596 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.516624 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.516643 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.520814 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.537923 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.552304 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.568050 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0f60dfb-d71c-415e-840b-4e5cef6037b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c0d47ed48809e56449b68ddfc86eb427381be87a8df60f581867c82d8c44890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.585060 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.601051 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.616410 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.619950 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.620054 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.620076 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.620103 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.620135 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.635280 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.650201 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.672825 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6f6e150f09f0b817d6af18bb0ac35f23f1c7221
7392be252e6821d945f02010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:23Z\\\",\\\"message\\\":\\\"for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z]\\\\nI0104 11:49:22.703323 7062 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"8b82f026-5975-4a1b-bb18-08d5d51147ec\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_clus\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.685486 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.699739 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.714591 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod
-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.727574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.727650 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.727671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.727698 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 
11:49:24.727724 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.735941 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce
0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.755764 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:14Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:29+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd\\\\n2026-01-04T11:48:29+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd to /host/opt/cni/bin/\\\\n2026-01-04T11:48:29Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:29Z [verbose] Readiness Indicator file check\\\\n2026-01-04T11:49:14Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.806939 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:24 crc kubenswrapper[5003]: E0104 11:49:24.807622 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.831370 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-
01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4
c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.831728 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.831774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.831789 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.831815 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.831832 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.852415 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.875570 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.896609 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.913605 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.927283 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.934044 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.934084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.934097 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.934130 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.934145 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:24Z","lastTransitionTime":"2026-01-04T11:49:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.938613 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0f60dfb-d71c-415e-840b-4e5cef6037b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c0d47ed48809e56449b68ddfc86eb427381be87a8df60f581867c82d8c44890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 
crc kubenswrapper[5003]: I0104 11:49:24.955314 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\
",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.971511 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:24 crc kubenswrapper[5003]: I0104 11:49:24.989805 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:24Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.003919 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.018089 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.030071 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.036714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.036806 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 
11:49:25.036835 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.036870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.036898 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.041739 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:25 crc 
kubenswrapper[5003]: I0104 11:49:25.054990 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.070066 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.087367 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.104267 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:14Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:29+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd\\\\n2026-01-04T11:48:29+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd to /host/opt/cni/bin/\\\\n2026-01-04T11:48:29Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:29Z [verbose] Readiness 
Indicator file check\\\\n2026-01-04T11:49:14Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.126070 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:23Z\\\",\\\"message\\\":\\\"for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z]\\\\nI0104 11:49:22.703323 7062 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"8b82f026-5975-4a1b-bb18-08d5d51147ec\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_clus\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:25Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.140950 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.141067 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.141263 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.141296 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.141321 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.243967 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.244060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.244074 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.244092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.244105 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.347213 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.347300 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.347322 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.347355 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.347377 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.452767 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.452817 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.452830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.452847 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.452865 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.555964 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.556000 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.556041 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.556054 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.556062 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.660100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.660186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.660209 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.660238 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.660259 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.763444 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.763491 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.763502 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.763521 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.763533 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.806679 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.806784 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.806679 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:25 crc kubenswrapper[5003]: E0104 11:49:25.806925 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:25 crc kubenswrapper[5003]: E0104 11:49:25.807007 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:25 crc kubenswrapper[5003]: E0104 11:49:25.807216 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.867486 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.867566 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.867593 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.867630 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.867655 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.972163 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.972240 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.972263 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.972295 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:25 crc kubenswrapper[5003]: I0104 11:49:25.972315 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:25Z","lastTransitionTime":"2026-01-04T11:49:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.076295 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.076359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.076376 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.076402 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.076424 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.179718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.179771 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.179786 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.179811 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.179826 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.283492 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.283550 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.283570 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.283608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.283627 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.387627 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.388084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.388242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.388410 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.388548 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.491710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.491794 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.491815 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.491848 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.491872 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.595104 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.595156 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.595166 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.595183 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.595194 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.697922 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.698051 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.698080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.698117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.698146 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.801207 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.801280 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.801300 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.801326 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.801348 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.806702 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:49:26 crc kubenswrapper[5003]: E0104 11:49:26.807080 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.904209 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.904266 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.904280 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.904299 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:26 crc kubenswrapper[5003]: I0104 11:49:26.904313 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:26Z","lastTransitionTime":"2026-01-04T11:49:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.007715 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.007780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.007799 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.007825 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.007846 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.111490 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.111554 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.111574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.111606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.111629 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.214855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.214927 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.214944 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.214968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.214986 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.318301 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.318347 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.318362 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.318381 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.318395 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.420812 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.420882 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.420907 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.420938 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.420959 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.523723 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.523805 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.523830 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.523863 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.523884 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.626965 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.627066 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.627079 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.627099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.627114 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.730954 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.731042 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.731060 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.731080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.731093 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.800855 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.801009 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.801051 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.801115 5003 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.801159 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:50:31.801146829 +0000 UTC m=+147.274176670 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.801321 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:31.801313853 +0000 UTC m=+147.274343694 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.801374 5003 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.801394 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-04 11:50:31.801388535 +0000 UTC m=+147.274418376 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.806112 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.806151 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.806180 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.806263 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
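The UnmountVolume failure above is a different problem from the CNI one: the CSI driver kubevirt.io.hostpath-provisioner has not (yet) registered with this kubelet, so the volume plugin cannot obtain a CSI client for TearDown. Drivers typically announce themselves through registration sockets in the kubelet's plugin registry; a small sketch that lists them, with the default /var/lib/kubelet/plugins_registry path taken as an assumption (a CRC install may differ):

package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	// Assumed default kubelet plugin-registration directory; each CSI driver
	// that has registered typically leaves a <driver-name>-reg.sock here.
	const regDir = "/var/lib/kubelet/plugins_registry"
	entries, err := os.ReadDir(regDir)
	if err != nil {
		fmt.Println("cannot read", regDir, "-", err)
		return
	}
	for _, e := range entries {
		if strings.HasSuffix(e.Name(), ".sock") {
			fmt.Println("registered plugin socket:", e.Name())
		}
	}
}

An empty listing here would match the "not found in the list of registered CSI drivers" error: the unmount is simply racing the driver pod, which cannot start until the node's network is up.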
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.806200 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.806373 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.833299 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.833341 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.833352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.833368 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.833379 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.902177 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.902553 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.902612 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.902641 5003 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.902764 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-04 11:50:31.90272612 +0000 UTC m=+147.375756001 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.902923 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.903108 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.903139 5003 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.903151 5003 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:49:27 crc kubenswrapper[5003]: E0104 11:49:27.903201 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-04 11:50:31.903187372 +0000 UTC m=+147.376217203 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.936548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.936867 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.937092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.937305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:27 crc kubenswrapper[5003]: I0104 11:49:27.937519 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:27Z","lastTransitionTime":"2026-01-04T11:49:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.040782 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.040827 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.040840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.040858 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.040869 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.143590 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.143928 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.144135 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.144277 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.144404 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.247791 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.248093 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.248156 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.248242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.248319 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.350921 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.351055 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.351101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.351167 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.351185 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.454353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.454423 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.454440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.454464 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.454483 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.557545 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.558190 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.558224 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.558252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.558270 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.661470 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.661547 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.661570 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.661600 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.661619 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.764949 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.765094 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.765117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.765147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.765171 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.805833 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:49:28 crc kubenswrapper[5003]: E0104 11:49:28.806140 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.867845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.867901 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.867918 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.867943 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.867963 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.970229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.970268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.970279 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.970292 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:28 crc kubenswrapper[5003]: I0104 11:49:28.970301 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:28Z","lastTransitionTime":"2026-01-04T11:49:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.073076 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.073142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.073168 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.073201 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.073225 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.177165 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.177239 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.177260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.177286 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.177305 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.281222 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.281288 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.281304 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.281329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.281349 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.307571 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.307646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.307672 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.307702 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.307725 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
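The condition literal that setters.go:603 keeps printing is an ordinary corev1.NodeCondition, and it is exactly what the status patch in the next entry tries to push to the API server. A sketch decoding one (the field names come from this log; the message is shortened here for brevity):

package main

import (
	"encoding/json"
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Literal copied from the setters.go:603 entries above, message shortened.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
	var c corev1.NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("node is %s=%s because %s\n", c.Type, c.Status, c.Reason)
}

The patch that follows bundles this Ready condition with the MemoryPressure, DiskPressure, and PIDPressure conditions plus the node's allocatable/capacity figures and image list; it is retried because the first attempt failed.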
Jan 04 11:49:29 crc kubenswrapper[5003]: E0104 11:49:29.326422 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.332211 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.332258 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.332277 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.332315 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.332334 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[5003]: E0104 11:49:29.352197 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.355925 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.355985 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.356005 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.356057 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.356075 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[5003]: E0104 11:49:29.378253 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.382897 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.382948 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.382973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.382999 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.383127 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[5003]: E0104 11:49:29.401874 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.407533 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.407741 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.407791 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.407823 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.407845 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[5003]: E0104 11:49:29.427755 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:29Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:29 crc kubenswrapper[5003]: E0104 11:49:29.427973 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.430661 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
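
Every failed patch in the retry loop above reports the same root cause: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is serving a certificate whose notAfter (2025-08-24T17:21:41Z) lies before the node clock (2026-01-04T11:49:29Z), so kubelet's Post fails TLS verification before the status patch is ever applied. A minimal Go sketch of a standalone check (a hypothetical diagnostic, not part of kubelet or of this log) that dials such an endpoint and prints the leaf certificate's validity window:

    // certcheck: dial a TLS endpoint and report the leaf certificate's
    // validity window. Hypothetical diagnostic sketch; the address below
    // is the webhook endpoint named in the log above.
    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
            // Skip chain verification so an expired certificate can be
            // inspected instead of aborting the handshake.
            InsecureSkipVerify: true,
        })
        if err != nil {
            log.Fatalf("dial: %v", err)
        }
        defer conn.Close()

        certs := conn.ConnectionState().PeerCertificates
        if len(certs) == 0 {
            log.Fatal("no peer certificates presented")
        }
        leaf := certs[0]
        fmt.Printf("subject:   %s\n", leaf.Subject)
        fmt.Printf("notBefore: %s\n", leaf.NotBefore.Format(time.RFC3339))
        fmt.Printf("notAfter:  %s\n", leaf.NotAfter.Format(time.RFC3339))
        if time.Now().After(leaf.NotAfter) {
            fmt.Println("certificate is expired relative to the local clock")
        }
    }

If notAfter is in the past against a correct clock, rotating the webhook's serving certificate (or correcting the node clock, if it is the clock that is wrong) is the usual remedy; the log itself does not say which applies here.
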
event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.430719 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.430738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.430763 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.430781 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.534009 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.534128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.534155 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.534185 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.534208 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.638170 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.638227 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.638244 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.638269 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.638286 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.740821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.740886 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.740908 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.740935 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.740954 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.806571 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.806706 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:29 crc kubenswrapper[5003]: E0104 11:49:29.806799 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.806847 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:29 crc kubenswrapper[5003]: E0104 11:49:29.807050 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:29 crc kubenswrapper[5003]: E0104 11:49:29.807213 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.848262 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.848419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.848708 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.848750 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.848769 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.952638 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.952686 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.952698 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.952716 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:29 crc kubenswrapper[5003]: I0104 11:49:29.952727 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:29Z","lastTransitionTime":"2026-01-04T11:49:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.055829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.055870 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.055879 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.055913 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.055924 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.158252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.158306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.158321 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.158340 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.158357 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.260839 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.260882 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.260918 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.260936 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.260947 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.363583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.363721 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.363744 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.363771 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.363791 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.467792 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.467846 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.467864 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.467892 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.467916 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.570636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.570688 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.570701 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.570719 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.570730 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.673226 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.673287 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.673302 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.673327 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.673345 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.776529 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.776574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.776586 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.776602 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.776615 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.806573 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:30 crc kubenswrapper[5003]: E0104 11:49:30.806781 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.879476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.879525 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.879545 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.879572 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.879595 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.981923 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.981973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.981991 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.982041 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:30 crc kubenswrapper[5003]: I0104 11:49:30.982058 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:30Z","lastTransitionTime":"2026-01-04T11:49:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.084917 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.084968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.084985 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.085033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.085053 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.188942 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.189057 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.189076 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.189099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.189118 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.292068 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.292130 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.292147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.292174 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.292191 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.395218 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.395270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.395290 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.395314 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.395331 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.498477 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.498528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.498544 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.498567 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.498586 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.601883 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.601959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.601970 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.601990 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.602003 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.704969 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.705069 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.705087 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.705114 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.705132 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.806498 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.806616 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:31 crc kubenswrapper[5003]: E0104 11:49:31.806700 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.806615 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:31 crc kubenswrapper[5003]: E0104 11:49:31.806842 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:31 crc kubenswrapper[5003]: E0104 11:49:31.807125 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.808644 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.808692 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.808709 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.808734 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.808752 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.912171 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.912241 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.912269 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.912295 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:31 crc kubenswrapper[5003]: I0104 11:49:31.912313 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:31Z","lastTransitionTime":"2026-01-04T11:49:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.015738 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.015796 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.015813 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.015837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.015853 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.119638 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.119713 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.119733 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.119762 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.119782 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.223666 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.223749 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.223767 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.223798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.223819 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.327450 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.327876 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.328090 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.328255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.328425 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.431631 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.432213 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.432451 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.432673 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.432872 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.542387 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.542926 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.543890 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.544158 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.544351 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.647597 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.648101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.648315 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.648459 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.648588 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.752530 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.752592 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.752610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.752636 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.752655 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.806793 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:32 crc kubenswrapper[5003]: E0104 11:49:32.807150 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.855729 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.856148 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.856329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.856470 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.856639 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.960753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.960782 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.960791 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.960804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:32 crc kubenswrapper[5003]: I0104 11:49:32.960814 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:32Z","lastTransitionTime":"2026-01-04T11:49:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.064139 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.064479 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.064565 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.064652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.064723 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.167774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.167800 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.167808 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.167820 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.167829 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.270593 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.270685 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.270707 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.270740 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.270763 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.375245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.375573 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.375681 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.375767 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.375845 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.479424 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.479804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.479814 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.479828 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.479839 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.583050 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.583129 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.583140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.583156 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.583185 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.685528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.685578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.685587 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.685600 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.685608 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.788284 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.788331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.788345 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.788361 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.788374 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.805988 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.805988 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:33 crc kubenswrapper[5003]: E0104 11:49:33.806148 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:33 crc kubenswrapper[5003]: E0104 11:49:33.806186 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.806035 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:33 crc kubenswrapper[5003]: E0104 11:49:33.806239 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.890634 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.890673 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.890684 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.890701 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.890714 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.995377 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.995421 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.995433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.995449 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:33 crc kubenswrapper[5003]: I0104 11:49:33.995462 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:33Z","lastTransitionTime":"2026-01-04T11:49:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.098217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.098254 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.098264 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.098302 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.098315 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.200961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.201061 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.201072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.201092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.201108 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.304939 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.305084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.305161 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.305205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.305277 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.409969 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.410113 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.410138 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.410176 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.410203 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.513065 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.513117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.513131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.513147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.513161 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.616140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.616219 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.616241 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.616271 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.616293 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.719255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.719296 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.719306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.719325 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.719335 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.806548 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:34 crc kubenswrapper[5003]: E0104 11:49:34.808425 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.822219 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.822293 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.822320 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.822351 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.822375 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.830396 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.853541 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0f
b2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.871146 5003 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db4
0b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.896598 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:14Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:29+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd\\\\n2026-01-04T11:48:29+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd to 
/host/opt/cni/bin/\\\\n2026-01-04T11:48:29Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:29Z [verbose] Readiness Indicator file check\\\\n2026-01-04T11:49:14Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.924835 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:23Z\\\",\\\"message\\\":\\\"for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z]\\\\nI0104 11:49:22.703323 7062 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"8b82f026-5975-4a1b-bb18-08d5d51147ec\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_clus\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.926592 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.926675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.926698 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.926736 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.926762 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:34Z","lastTransitionTime":"2026-01-04T11:49:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.942281 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.975035 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1
bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:34 crc kubenswrapper[5003]: I0104 11:49:34.994192 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:34Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.009343 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.031609 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.031649 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.031662 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.031682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc 
kubenswrapper[5003]: I0104 11:49:35.031696 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.032377 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\
\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.046068 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.063136 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0f60dfb-d71c-415e-840b-4e5cef6037b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c0d47ed48809e56449b68ddfc86eb427381be87a8df60f581867c82d8c44890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.083758 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.104894 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.124533 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.134037 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.134097 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.134142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.134168 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.134186 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.142389 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.162521 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.181887 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.196937 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:35Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.237550 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.237614 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 
11:49:35.237631 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.237656 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.237676 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.340535 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.340574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.340583 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.340740 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.340757 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:35Z","lastTransitionTime":"2026-01-04T11:49:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.805789 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.805938 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:35 crc kubenswrapper[5003]: I0104 11:49:35.806257 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:35 crc kubenswrapper[5003]: E0104 11:49:35.806732 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:35 crc kubenswrapper[5003]: E0104 11:49:35.806845 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:35 crc kubenswrapper[5003]: E0104 11:49:35.806971 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:36 crc kubenswrapper[5003]: I0104 11:49:36.805858 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:49:36 crc kubenswrapper[5003]: E0104 11:49:36.806161 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
Jan 04 11:49:37 crc kubenswrapper[5003]: I0104 11:49:37.805678 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:37 crc kubenswrapper[5003]: E0104 11:49:37.805863 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:37 crc kubenswrapper[5003]: I0104 11:49:37.805714 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:37 crc kubenswrapper[5003]: E0104 11:49:37.805982 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:37 crc kubenswrapper[5003]: I0104 11:49:37.805687 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:37 crc kubenswrapper[5003]: E0104 11:49:37.806127 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:38 crc kubenswrapper[5003]: I0104 11:49:38.806591 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:49:38 crc kubenswrapper[5003]: E0104 11:49:38.806948 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
Jan 04 11:49:38 crc kubenswrapper[5003]: I0104 11:49:38.807938 5003 scope.go:117] "RemoveContainer" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"
Jan 04 11:49:38 crc kubenswrapper[5003]: E0104 11:49:38.808230 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f"
Jan 04 11:49:39 crc kubenswrapper[5003]: E0104 11:49:39.742642 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.749494 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.749544 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.749561 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.749587 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.749607 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:39Z","lastTransitionTime":"2026-01-04T11:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:39 crc kubenswrapper[5003]: E0104 11:49:39.771357 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.777924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.777997 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
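Every patch attempt above fails the same way: the kubelet's Post to the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 is rejected by TLS verification because the serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2026-01-04. A minimal Go sketch of a diagnostic (hypothetical, not part of the kubelet; run on the node itself) that dials the endpoint and prints the certificate's validity window:

```go
// Hypothetical diagnostic: dial the webhook endpoint the kubelet is failing
// against and print the serving certificate's lifetime.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Verification is skipped on purpose: the goal is to *inspect* the expired
	// certificate, which a verifying client (like the kubelet's) rightly rejects.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.UTC().Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
	fmt.Printf("expired:   %t\n", time.Now().After(cert.NotAfter))
}
```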
event="NodeHasNoDiskPressure" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.778044 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.778073 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.778092 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:39Z","lastTransitionTime":"2026-01-04T11:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:39 crc kubenswrapper[5003]: E0104 11:49:39.801236 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:39Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.805890 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.805968 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.806050 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:39 crc kubenswrapper[5003]: E0104 11:49:39.806119 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:39 crc kubenswrapper[5003]: E0104 11:49:39.806286 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:39 crc kubenswrapper[5003]: E0104 11:49:39.806544 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.807915 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.807976 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.807995 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.808051 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.808075 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:39Z","lastTransitionTime":"2026-01-04T11:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
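The NotReady condition and the skipped pod syncs share one root cause: the container runtime reports NetworkReady=false until a network config appears in /etc/kubernetes/cni/net.d/. A rough Go sketch of that readiness test, assuming the conventional CNI file extensions (the real check lives in the CRI runtime, so treat this as an approximation):

```go
// Approximate sketch of the readiness test implied by the message above:
// the node stays NotReady until some CNI network config exists in the
// directory the kubelet names.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d"
	var found []string
	// *.conf, *.conflist, and *.json are the conventional CNI config
	// extensions (an assumption; the runtime's exact filter may differ).
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pattern))
		if err == nil {
			found = append(found, matches...)
		}
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file found; NetworkReady stays false")
		os.Exit(1)
	}
	fmt.Println("CNI config present:", found)
}
```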
Jan 04 11:49:39 crc kubenswrapper[5003]: E0104 11:49:39.829822 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [status patch identical to the attempts above; payload elided] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:39Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.836415 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.836488 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
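The err= string quotes the entire strategic-merge patch the kubelet tried to send. $setElementOrder/conditions is the strategic-merge-patch directive that pins the ordering of the conditions list, which is otherwise merged by its "type" key, so only the changed entries need to be spelled out in full. A trimmed Go sketch of the same shape (illustrative values only):

```go
// Trimmed sketch of the node status patch shape quoted in err= above:
// a strategic merge patch where "conditions" is merged by its "type" key
// and "$setElementOrder/conditions" preserves the list order.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	patch := map[string]any{
		"status": map[string]any{
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"},
				{"type": "DiskPressure"},
				{"type": "PIDPressure"},
				{"type": "Ready"},
			},
			// Only the entries being changed need to be sent in full.
			"conditions": []map[string]string{{
				"type":    "Ready",
				"status":  "False",
				"reason":  "KubeletNotReady",
				"message": "container runtime network not ready",
			}},
		},
	}
	out, _ := json.MarshalIndent(patch, "", "  ")
	fmt.Println(string(out))
}
```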
event="NodeHasNoDiskPressure" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.836507 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.836537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.836558 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:39Z","lastTransitionTime":"2026-01-04T11:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:39 crc kubenswrapper[5003]: E0104 11:49:39.859906 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:39Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:39 crc kubenswrapper[5003]: E0104 11:49:39.860331 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.863267 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
event="NodeHasSufficientMemory" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.863343 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.863366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.863398 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.863424 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:39Z","lastTransitionTime":"2026-01-04T11:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.967212 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.967295 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.967314 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.967353 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:39 crc kubenswrapper[5003]: I0104 11:49:39.967382 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:39Z","lastTransitionTime":"2026-01-04T11:49:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.070959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.071075 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.071101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.071133 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.071158 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:40Z","lastTransitionTime":"2026-01-04T11:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.175560 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.175652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.175670 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.175696 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.175711 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:40Z","lastTransitionTime":"2026-01-04T11:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.279399 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.279470 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.279489 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.279513 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.279531 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:40Z","lastTransitionTime":"2026-01-04T11:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.383292 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.383354 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.383367 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.383388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.383401 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:40Z","lastTransitionTime":"2026-01-04T11:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.487682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.487797 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.487816 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.487845 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.487867 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:40Z","lastTransitionTime":"2026-01-04T11:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.591177 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.591259 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.591276 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.591311 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.591331 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:40Z","lastTransitionTime":"2026-01-04T11:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.695254 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.695317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.695338 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.695397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.695417 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:40Z","lastTransitionTime":"2026-01-04T11:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.797525 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.797567 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.797575 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.797594 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.797605 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:40Z","lastTransitionTime":"2026-01-04T11:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.806138 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:40 crc kubenswrapper[5003]: E0104 11:49:40.806274 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.900861 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.900928 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.900946 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.900975 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:40 crc kubenswrapper[5003]: I0104 11:49:40.900995 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:40Z","lastTransitionTime":"2026-01-04T11:49:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.005445 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.005517 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.005537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.005567 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.005587 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.109532 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.109615 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.109649 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.109690 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.109710 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.214231 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.214290 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.214315 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.214345 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.214369 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.317641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.318061 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.318331 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.318480 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.318719 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.421960 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.422080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.422102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.422135 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.422160 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.525490 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.525553 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.525575 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.525600 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.525619 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.628947 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.629059 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.629074 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.629094 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.629109 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.732244 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.732313 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.732336 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.732396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.732421 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.805856 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.805964 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.805862 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:41 crc kubenswrapper[5003]: E0104 11:49:41.806167 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:41 crc kubenswrapper[5003]: E0104 11:49:41.806252 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:41 crc kubenswrapper[5003]: E0104 11:49:41.806366 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.836328 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.836384 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.836408 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.836438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.836456 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.940058 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.940147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.940159 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.940177 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:41 crc kubenswrapper[5003]: I0104 11:49:41.940189 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:41Z","lastTransitionTime":"2026-01-04T11:49:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.043572 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.043624 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.043637 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.043660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.043672 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.147252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.147360 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.147374 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.147402 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.147418 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.250428 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.250491 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.250506 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.250532 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.250548 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.354084 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.354148 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.354166 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.354190 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.354220 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.458122 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.458179 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.458194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.458217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.458233 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.561260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.561310 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.561323 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.561340 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.561352 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.673410 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.673480 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.673503 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.673531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.673553 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.777245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.777316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.777336 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.777362 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.777380 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.805805 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:49:42 crc kubenswrapper[5003]: E0104 11:49:42.806046 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.882099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.882151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.882166 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.882189 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.882205 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.986137 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.986837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.987369 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.987679 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:42 crc kubenswrapper[5003]: I0104 11:49:42.987993 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:42Z","lastTransitionTime":"2026-01-04T11:49:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.097317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.097385 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.097403 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.097429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.097446 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.201512 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.201608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.201631 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.201664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.201684 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.305645 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.305907 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.305927 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.305957 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.305979 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.410954 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.411113 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.411147 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.411186 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.411213 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.515553 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.515625 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.515642 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.515671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.515694 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.619744 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.619925 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.619991 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.620080 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.620156 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.725274 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.725376 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.725395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.725454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.725474 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.806246 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.806323 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.806362 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:43 crc kubenswrapper[5003]: E0104 11:49:43.806444 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:43 crc kubenswrapper[5003]: E0104 11:49:43.806610 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:43 crc kubenswrapper[5003]: E0104 11:49:43.806754 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.829179 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.829237 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.829255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.829281 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.829299 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.933390 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.933456 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.933474 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.933503 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:43 crc kubenswrapper[5003]: I0104 11:49:43.933522 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:43Z","lastTransitionTime":"2026-01-04T11:49:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.037745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.037866 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.037893 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.037928 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.037953 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.141660 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.141770 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.141793 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.141819 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.141842 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.246452 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.246534 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.246558 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.246589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.246610 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.350136 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.350211 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.350228 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.350255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.350273 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.453806 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.453878 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.453897 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.453929 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.453951 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.558134 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.558215 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.558233 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.558261 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.558279 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.662270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.662334 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.662351 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.662378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.662397 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.765562 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.765627 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.765646 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.765675 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.765697 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.806487 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:49:44 crc kubenswrapper[5003]: E0104 11:49:44.806688 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.828187 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"01ede350-1819-4157-b857-c68a6a3457dd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b2096947ed145a7b9482af4491217b3657fbbc74654790676a06f70c938e59d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a519008319f92603b4fd769aa06e46ece0161210016ae942dde01d828e4153fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b01101e6a58ad97c5626e5e6b439ac29a0c28384bfc9027b58bc7713942206d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19b0363ec1bdedddf0eca77d19de840253fdec6bfacc0ba2a2dde00536288c5d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.851609 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.872217 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.872308 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.872334 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.872370 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.872391 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.874230 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c420cfe0f93c8b179402dad57d23e8a21efd0c367a53e3430cc5a3c4f7289b24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9df40ea6ee87a7579db75b9b8851329a2c01c263bd05c36ec141c87f78b6433d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.895486 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.913593 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-2kmwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70c77c4b-a997-4714-9708-d2b725bfe5c5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9912543b188818fb607e71e3c52ced4eff0263f908b4bcfaa4448b91b4cdecb9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6892\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-2kmwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.932741 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a0f60dfb-d71c-415e-840b-4e5cef6037b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c0d47ed48809e56449b68ddfc86eb427381be87a8df60f581867c82d8c44890\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://207fc3c1ffae679d524ce82df81b41f08d722b0923d520ef075f213bc6b71cde\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.954562 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1d785a1a-7eaf-4192-915a-49f478c2a59a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://074271b566706a8718fb11bfe5f97b4f1fdad7c2cb51a8c1940369fa34e96ee0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xhg5j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rcgwp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.971884 5003 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9th4p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:42Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2zwh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.976250 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.976321 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 
11:49:44.976341 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.976371 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.976390 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:44Z","lastTransitionTime":"2026-01-04T11:49:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:44 crc kubenswrapper[5003]: I0104 11:49:44.992129 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c37e33ab4333d236c1889cea90656aeb33a782cbbb9587b45ef895140b4a496\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:44Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.011379 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc62ea57-079c-45eb-ac97-f7b2617d2bc1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a59cf56f0f1a074cddc740ea67b99623c9241a1daa6c9892eefbf8cd173d9d17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb7dce7f71063eb279df68aad6c83d6bd111eeead8cfb5bd369b72ffb176cde8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d627dd4452de1cd369d5ce79cbff6b8fca6e0d77d7db40b0c9a20ba5ca54975a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.032688 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-np5qh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:14Z\\\",\\\"message\\\":\\\"2026-01-04T11:48:29+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd\\\\n2026-01-04T11:48:29+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_d21e0005-2905-4ac9-9eec-b3f248c9a5bd to /host/opt/cni/bin/\\\\n2026-01-04T11:48:29Z [verbose] multus-daemon started\\\\n2026-01-04T11:48:29Z [verbose] Readiness 
Indicator file check\\\\n2026-01-04T11:49:14Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:49:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-w66t5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-np5qh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.063921 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:28Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-04T11:49:23Z\\\",\\\"message\\\":\\\"for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:22Z is after 2025-08-24T17:21:41Z]\\\\nI0104 11:49:22.703323 7062 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_cluster\\\\\\\", UUID:\\\\\\\"8b82f026-5975-4a1b-bb18-08d5d51147ec\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-apiserver-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-apiserver-operator/metrics_TCP_clus\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:49:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ss2hj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2lwxt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.080261 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.080330 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.080350 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.080378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.080396 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
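Every failure in the records above shares one root cause: the serving certificate behind the node.network-node-identity.openshift.io and pod.network-node-identity.openshift.io webhooks expired on 2025-08-24T17:21:41Z, while the node clock reads 2026-01-04. The x509 error text is Go's standard validity-window check; a minimal stdlib sketch of that same check follows (the certificate path is hypothetical, for illustration only):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path; substitute the webhook's actual serving certificate.
	data, err := os.ReadFile("/etc/kubernetes/webhook-serving.crt")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	// The same window check that produced the log's
	// "certificate has expired or is not yet valid" error.
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}

Until that certificate is rotated, every status patch the kubelet sends will keep failing the webhook call, which is why the same error repeats throughout the records below.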
Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.085231 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-nlgkr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"74725edc-9566-4f57-81fc-1faf878f1ede\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d4d10bbe58078271710b796ba570179704ebf572d97403c5fba2f049610896a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tg64j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:30Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-nlgkr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.102675 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65e8a928-4671-4299-840a-812a83f36ba1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e78602107a0c94d79560902c8a3636e32998d67b852202fe4329ddea3c93d4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfebfbfc8fadf934d7b9ee31dfcf4533f17e01c47d9a96ad86de85c19f1bb983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvb2d\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-9x5wb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:45Z is after 2025-08-24T17:21:41Z" Jan 04 
11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.125906 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ed41329b-30c2-4489-9315-534f0431252c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"lling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0104 11:48:18.426243 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0104 11:48:18.429399 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-134375423/tls.crt::/tmp/serving-cert-134375423/tls.key\\\\\\\"\\\\nI0104 11:48:23.749351 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0104 11:48:23.751366 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0104 11:48:23.751388 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0104 11:48:23.751413 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0104 11:48:23.751419 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0104 11:48:23.755797 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0104 11:48:23.755811 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0104 11:48:23.755858 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755869 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0104 11:48:23.755877 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0104 11:48:23.755884 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0104 11:48:23.755890 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0104 11:48:23.755898 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0104 11:48:23.757097 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.145638 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://081dc5760e2d519907a1a3f19398f1f845436b9c915d09563fa0ea267a0f82b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.166613 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.183899 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.183966 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.183985 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.184043 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.184064 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
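Each "Failed to update status for pod" record embeds the attempted status patch as an escaped JSON string. Once unescaped it is ordinary JSON and can be pretty-printed; a small Go sketch, using a shortened fragment of the node-ca patch above as sample input:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
)

func main() {
	// Shortened fragment of one patch from the log, already unescaped.
	patch := `{"metadata":{"uid":"74725edc-9566-4f57-81fc-1faf878f1ede"},"status":{"phase":"Running"}}`
	var buf bytes.Buffer
	if err := json.Indent(&buf, []byte(patch), "", "  "); err != nil {
		log.Fatal(err)
	}
	fmt.Println(buf.String())
}

The "$setElementOrder/conditions" keys visible in the patches mark them as strategic-merge patches against the pod's conditions list, not plain JSON merge patches.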
Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.190848 5003 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a8de5487-0fcc-4344-a821-e485f3090ecb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b5b85db2c1c247bb94afa60d23ada02b9349e9a716de56490ce5c51a76dce9b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81cca55308af422b05d2bd8d15f6615072aabd017c7cc1e2e09ebc7b4f98b802\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82e72c4890793315ac613881408b941f476eafe5139de1133baec5e56f33985f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0b47f56220f427d27c56c88a26f83b64dab4cd87b84b1821566ff31afff12bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://248236fecc276e9edaf4ffa9f64b03e6ea9d9615e7162bca822efe2a0ac0d3d7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://77bf17e3bd19384de4b6d7f49c5c73fbd276e2001b0ede0ae8960c69f8769163\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://364092644d6c7a6272830c4cbf0c6068763ae94aa4e7fca306a7b8745da2d92c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cvkrg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:28Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-qpwf5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.223982 5003 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d345a08-f54a-4ca9-b4bb-8a37b6a5c1f7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-04T11:48:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a63e43e4b06e721d23fc455abde6d4a3193875172fe7b5072ce91fa36544242f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://15be58a9698b2bbfff670eb49fa3522f9e5c6fddbacff39a1b7d0df34c60e7a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://43926b14a991cc29f911850744a62b2ad166b5d10b7a82cfbd552c94ec1873b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://0bd956da291d450740371c563811bb89fb220d1bf10976724e1c21627c651b4c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f23c6dd3c5ed8f7234409e1ef4e94edeacebb088c51c9e09150f26e73dd0ea6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-04T11:48:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://74d63d573a88e5702e65967cb2affba70c288c6062c5e01309b7a36de597637d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11de898ea4b3356b3aed7b283002d5cbec1a7e4c124ae117de84a0f8383fd538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:06Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://a744382c2e
9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a744382c2e9e0f0b506eda3cb1c002c765bfaf35b43aa9baae0c46773616f6e4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-04T11:48:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-04T11:48:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-04T11:48:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:45Z is after 2025-08-24T17:21:41Z" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.286476 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.286540 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.286563 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.286594 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.286618 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
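The recurring "Node became not ready" lines print the Ready condition the kubelet is about to write: status False, reason KubeletNotReady, and a message pointing at the missing CNI config. A stdlib-only Go sketch that decodes one such condition object (the struct fields simply mirror the JSON keys shown in the log):

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// Mirrors the condition object printed by setters.go in the log.
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
	var c NodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s=%s (%s): %s\n", c.Type, c.Status, c.Reason, c.Message)
}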
Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.389539 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.389591 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.389611 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.389639 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.389658 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.492151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.492210 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.492230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.492253 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.492271 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.595352 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.595416 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.595434 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.595463 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.595484 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.699372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.699455 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.699470 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.699490 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.699503 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.802771 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.802836 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.802855 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.802884 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.802903 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.805995 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.806109 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:45 crc kubenswrapper[5003]: E0104 11:49:45.806187 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:45 crc kubenswrapper[5003]: E0104 11:49:45.806314 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.806426 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:45 crc kubenswrapper[5003]: E0104 11:49:45.806516 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.912358 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.912421 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.912455 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.912486 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:45 crc kubenswrapper[5003]: I0104 11:49:45.912505 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:45Z","lastTransitionTime":"2026-01-04T11:49:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.015135 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.015215 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.015229 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.015254 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.015270 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.119193 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.119255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.119275 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.119306 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.119325 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.223491 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.223559 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.223578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.223606 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.223626 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.327479 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.327592 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.327612 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.327651 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.327674 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.432491 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.432589 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.432610 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.432673 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.432694 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.535580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.535648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.535665 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.535696 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.535714 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.639263 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.639343 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.639364 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.639396 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.639416 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.734742 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:49:46 crc kubenswrapper[5003]: E0104 11:49:46.734990 5003 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 04 11:49:46 crc kubenswrapper[5003]: E0104 11:49:46.735126 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs podName:cd6de6ec-2a7c-4842-9d8a-ba4032acb50e nodeName:}" failed. No retries permitted until 2026-01-04 11:50:50.735096303 +0000 UTC m=+166.208126174 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs") pod "network-metrics-daemon-n2zwh" (UID: "cd6de6ec-2a7c-4842-9d8a-ba4032acb50e") : object "openshift-multus"/"metrics-daemon-secret" not registered
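The nestedpendingoperations entry above is the kubelet's per-volume exponential backoff: each failed MountVolume attempt doubles the wait before the next retry, and the logged 1m4s is consistent with a delay that started near 500ms and has doubled on every failure. A small sketch of that schedule (the 500ms seed and roughly-two-minute cap are assumptions based on common kubelet defaults, not values read from this log):

package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond            // assumed initial durationBeforeRetry
	maxDelay := 2*time.Minute + 2*time.Second  // assumed cap on the backoff
	for attempt := 1; attempt <= 10; attempt++ {
		fmt.Printf("failure %2d: durationBeforeRetry %v\n", attempt, delay)
		delay *= 2 // double after every failed attempt
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}

Failure 8 in this schedule prints 1m4s, and the retry timestamp in the log (11:50:50.735, exactly 64s after the 11:49:46.735 failure) matches the same arithmetic. The root cause is the Secret that is "not registered" yet; the backoff simply keeps extending until openshift-multus/metrics-daemon-secret becomes available to the kubelet.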
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.743198 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.743252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.743269 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.743301 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.743324 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.806742 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:49:46 crc kubenswrapper[5003]: E0104 11:49:46.807103 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.847105 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.847185 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.847205 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.847234 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.847258 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.951159 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.951249 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.951272 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.951308 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:46 crc kubenswrapper[5003]: I0104 11:49:46.951331 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:46Z","lastTransitionTime":"2026-01-04T11:49:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.055641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.055718 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.055745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.055781 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.055811 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.159178 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.159262 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.159284 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.159317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.159339 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.262546 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.262622 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.262640 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.262666 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.262683 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.366296 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.366372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.366391 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.366425 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.366451 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.469806 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.469875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.469935 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.470008 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.470126 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.573971 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.574088 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.574109 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.574140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.574159 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.677950 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.678072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.678102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.678131 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.678155 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.782002 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.782097 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.782113 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.782141 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.782165 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.805920 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.805970 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.806145 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:49:47 crc kubenswrapper[5003]: E0104 11:49:47.806322 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:49:47 crc kubenswrapper[5003]: E0104 11:49:47.806511 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:49:47 crc kubenswrapper[5003]: E0104 11:49:47.806633 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.885753 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.885821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.885843 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.885874 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.885895 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.990198 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.990307 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.990334 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.990375 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:47 crc kubenswrapper[5003]: I0104 11:49:47.990402 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:47Z","lastTransitionTime":"2026-01-04T11:49:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.094053 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.094126 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.094145 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.094177 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.094196 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.197485 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.197565 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.197587 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.197618 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.197644 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.301355 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.301429 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.301449 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.301475 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.301493 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.404721 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.404800 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.404819 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.404846 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.404865 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.507983 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.508101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.508121 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.508151 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.508173 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.611430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.611509 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.611538 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.611573 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.611634 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.714825 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.714874 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.714894 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.714915 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.714934 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.806360 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:48 crc kubenswrapper[5003]: E0104 11:49:48.806597 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.817290 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.817346 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.817365 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.817390 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.817410 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.920958 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.921206 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.921233 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.921265 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:48 crc kubenswrapper[5003]: I0104 11:49:48.921289 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:48Z","lastTransitionTime":"2026-01-04T11:49:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.023963 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.024072 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.024211 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.024247 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.024271 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.128210 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.128279 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.128299 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.128325 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.128349 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.234864 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.234959 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.234978 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.235005 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.235050 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.338733 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.338812 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.338831 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.338862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.338885 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.442332 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.442430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.442460 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.442497 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.442525 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.545328 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.545418 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.545441 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.545478 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.545504 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.649181 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.649572 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.649711 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.649915 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.650097 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.754098 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.754432 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.754623 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.754820 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.754984 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.805665 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.805893 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.805694 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:49 crc kubenswrapper[5003]: E0104 11:49:49.806396 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:49 crc kubenswrapper[5003]: E0104 11:49:49.806611 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:49 crc kubenswrapper[5003]: E0104 11:49:49.806934 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.857442 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.857495 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.857506 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.857521 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.857531 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.960745 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.960828 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.960852 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.960884 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:49 crc kubenswrapper[5003]: I0104 11:49:49.960906 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:49Z","lastTransitionTime":"2026-01-04T11:49:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.064629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.065286 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.065326 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.065365 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.065409 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.168768 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.168850 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.168868 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.168894 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.168915 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.193708 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.193800 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.193825 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.193900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.193917 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:50 crc kubenswrapper[5003]: E0104 11:49:50.215801 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:50Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.221829 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.221858 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.221871 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.221888 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.221898 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:50 crc kubenswrapper[5003]: E0104 11:49:50.248518 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:50Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.254613 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.254645 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.254655 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.254669 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.254681 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:50 crc kubenswrapper[5003]: E0104 11:49:50.272772 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:50Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.278887 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.278923 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.278932 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.278946 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.278955 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:50 crc kubenswrapper[5003]: E0104 11:49:50.298497 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:50Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.304577 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.304628 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.304645 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.304667 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.304685 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:50 crc kubenswrapper[5003]: E0104 11:49:50.325260 5003 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-04T11:49:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"7f249d9a-fde4-4cda-b9be-cd9f47dca495\\\",\\\"systemUUID\\\":\\\"11f74ffa-1339-43e9-94e3-2ecf4c29070a\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-04T11:49:50Z is after 2025-08-24T17:21:41Z"
Jan 04 11:49:50 crc kubenswrapper[5003]: E0104 11:49:50.325602 5003 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.333844 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.333883 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.333893 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.333910 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.333924 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.438399 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.438524 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.438546 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.438579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.438611 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.543377 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.543488 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.543512 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.543545 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.543573 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.647785 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.647900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.647925 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.647963 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.647993 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.751089 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.751146 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.751160 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.751185 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.751200 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.806496 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:50 crc kubenswrapper[5003]: E0104 11:49:50.807128 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.854054 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.854095 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.854108 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.854124 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.854135 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.957402 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.957579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.957602 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.957670 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:50 crc kubenswrapper[5003]: I0104 11:49:50.957694 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:50Z","lastTransitionTime":"2026-01-04T11:49:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.060209 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.060246 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.060255 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.060269 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.060278 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.162849 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.162936 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.162953 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.162975 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.162992 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.266305 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.266378 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.266397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.266420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.266439 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.368809 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.368848 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.368859 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.368875 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.368888 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.472128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.472268 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.472332 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.472359 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.472376 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.575609 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.575654 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.575665 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.575682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.575694 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.679627 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.680155 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.680400 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.680619 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.680805 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.784329 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.784412 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.784436 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.784467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.784495 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.805734 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.805863 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:51 crc kubenswrapper[5003]: E0104 11:49:51.805939 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.805970 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:51 crc kubenswrapper[5003]: E0104 11:49:51.806218 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:51 crc kubenswrapper[5003]: E0104 11:49:51.806765 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.807205 5003 scope.go:117] "RemoveContainer" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010" Jan 04 11:49:51 crc kubenswrapper[5003]: E0104 11:49:51.807599 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2lwxt_openshift-ovn-kubernetes(e40671d3-61d7-4a50-b4ea-a67e4005fc3f)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.888107 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.888487 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.888664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.888856 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.889087 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.993657 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.993695 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.993707 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.993725 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:51 crc kubenswrapper[5003]: I0104 11:49:51.993738 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:51Z","lastTransitionTime":"2026-01-04T11:49:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.096857 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.097298 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.097540 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.097769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.098068 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.201165 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.201241 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.201265 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.201295 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.201312 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.304193 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.304233 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.304245 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.304283 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.304296 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.407172 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.407236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.407258 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.407288 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.407312 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.511633 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.511973 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.512160 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.512316 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.512453 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.615714 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.615819 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.615850 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.615968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.616099 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.719650 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.719723 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.719744 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.719768 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.719784 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.807173 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:52 crc kubenswrapper[5003]: E0104 11:49:52.807479 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.822941 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.823002 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.823071 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.823099 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.823121 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.926117 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.926187 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.926297 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.926333 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:52 crc kubenswrapper[5003]: I0104 11:49:52.926355 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:52Z","lastTransitionTime":"2026-01-04T11:49:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.030191 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.030403 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.030425 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.030504 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.030524 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.133645 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.133740 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.133759 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.133788 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.133808 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.238052 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.238140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.238164 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.238197 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.238220 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.341511 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.341575 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.341594 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.341619 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.341636 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.445194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.445252 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.445270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.445294 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.445355 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.548537 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.548625 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.548648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.548677 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.548698 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.652101 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.652843 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.652873 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.652905 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.652928 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.756802 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.756862 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.756874 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.756895 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.756909 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.806474 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.806680 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:53 crc kubenswrapper[5003]: E0104 11:49:53.806751 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.806794 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:53 crc kubenswrapper[5003]: E0104 11:49:53.806928 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:53 crc kubenswrapper[5003]: E0104 11:49:53.807124 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.860619 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.860671 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.860690 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.860710 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.860727 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.964447 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.964504 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.964520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.964543 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:53 crc kubenswrapper[5003]: I0104 11:49:53.964561 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:53Z","lastTransitionTime":"2026-01-04T11:49:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.068068 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.068118 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.068128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.068144 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.068157 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.171244 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.171317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.171340 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.171368 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.171390 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.274523 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.274582 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.274595 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.274618 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.274631 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.378505 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.378569 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.378602 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.378631 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.378651 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.482388 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.482440 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.482450 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.482467 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.482479 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.584954 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.585004 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.585033 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.585050 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.585065 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.687492 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.687539 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.687551 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.687571 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.687583 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.790652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.790702 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.790717 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.790736 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.790750 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.806442 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:54 crc kubenswrapper[5003]: E0104 11:49:54.806575 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.896968 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.897058 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.897077 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.897102 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.897119 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:54Z","lastTransitionTime":"2026-01-04T11:49:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.898697 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-nlgkr" podStartSLOduration=87.898674473 podStartE2EDuration="1m27.898674473s" podCreationTimestamp="2026-01-04 11:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:49:54.882719594 +0000 UTC m=+110.355749445" watchObservedRunningTime="2026-01-04 11:49:54.898674473 +0000 UTC m=+110.371704324" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.924302 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-9x5wb" podStartSLOduration=86.924260486 podStartE2EDuration="1m26.924260486s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:49:54.899619468 +0000 UTC m=+110.372649349" watchObservedRunningTime="2026-01-04 11:49:54.924260486 +0000 UTC m=+110.397290337" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.924827 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=90.924819651 podStartE2EDuration="1m30.924819651s" podCreationTimestamp="2026-01-04 11:48:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:49:54.924183584 +0000 UTC m=+110.397213455" watchObservedRunningTime="2026-01-04 11:49:54.924819651 +0000 UTC m=+110.397849502" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.964263 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=91.964243308 podStartE2EDuration="1m31.964243308s" podCreationTimestamp="2026-01-04 11:48:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:49:54.94417892 +0000 UTC m=+110.417208801" 
watchObservedRunningTime="2026-01-04 11:49:54.964243308 +0000 UTC m=+110.437273149" Jan 04 11:49:54 crc kubenswrapper[5003]: I0104 11:49:54.990061 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-np5qh" podStartSLOduration=86.990044276 podStartE2EDuration="1m26.990044276s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:49:54.965173792 +0000 UTC m=+110.438203653" watchObservedRunningTime="2026-01-04 11:49:54.990044276 +0000 UTC m=+110.463074117" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.002420 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.002454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.002463 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.002475 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.002484 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.021366 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-qpwf5" podStartSLOduration=87.02134706 podStartE2EDuration="1m27.02134706s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:49:54.989865702 +0000 UTC m=+110.462895553" watchObservedRunningTime="2026-01-04 11:49:55.02134706 +0000 UTC m=+110.494376921" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.021669 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=88.021664438 podStartE2EDuration="1m28.021664438s" podCreationTimestamp="2026-01-04 11:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:49:55.020493747 +0000 UTC m=+110.493523588" watchObservedRunningTime="2026-01-04 11:49:55.021664438 +0000 UTC m=+110.494694289" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.085504 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-2kmwl" podStartSLOduration=88.085487817 podStartE2EDuration="1m28.085487817s" podCreationTimestamp="2026-01-04 11:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:49:55.085043625 +0000 UTC m=+110.558073486" watchObservedRunningTime="2026-01-04 11:49:55.085487817 +0000 UTC m=+110.558517658" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.095454 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=40.095438179 podStartE2EDuration="40.095438179s" podCreationTimestamp="2026-01-04 11:49:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:49:55.094331249 +0000 UTC m=+110.567361080" watchObservedRunningTime="2026-01-04 11:49:55.095438179 +0000 UTC m=+110.568468020" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.104600 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=60.104583639 podStartE2EDuration="1m0.104583639s" podCreationTimestamp="2026-01-04 11:48:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:49:55.104085766 +0000 UTC m=+110.577115607" watchObservedRunningTime="2026-01-04 11:49:55.104583639 +0000 UTC m=+110.577613500" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.104918 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.105226 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.105236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.105251 5003 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.105262 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.139361 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podStartSLOduration=87.139346884 podStartE2EDuration="1m27.139346884s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:49:55.138861731 +0000 UTC m=+110.611891602" watchObservedRunningTime="2026-01-04 11:49:55.139346884 +0000 UTC m=+110.612376735" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.207529 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.207562 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.207573 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.207587 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.207596 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.310127 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.310177 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.310194 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.310214 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.310229 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.412251 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.412283 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.412291 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.412304 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.412313 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.515050 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.515601 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.515614 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.515634 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.515674 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.619459 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.619531 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.619549 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.619574 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.619591 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.722834 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.722882 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.722900 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.722922 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.722940 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.806673 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.806746 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:55 crc kubenswrapper[5003]: E0104 11:49:55.806821 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:55 crc kubenswrapper[5003]: E0104 11:49:55.806924 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.807054 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:55 crc kubenswrapper[5003]: E0104 11:49:55.807139 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.825196 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.825230 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.825242 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.825257 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.825271 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.928961 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.929110 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.929137 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.929173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:55 crc kubenswrapper[5003]: I0104 11:49:55.929198 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:55Z","lastTransitionTime":"2026-01-04T11:49:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.032579 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.032638 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.032655 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.032679 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.032697 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.138453 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.138523 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.138541 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.138580 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.138606 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.242974 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.243066 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.243086 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.243111 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.243129 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.347001 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.347112 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.347140 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.347173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.347202 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.450397 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.450463 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.450481 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.450506 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.450524 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.553325 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.553387 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.553406 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.553430 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.553447 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.656972 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.657095 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.657114 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.657144 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.657169 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.759803 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.759903 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.759924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.759952 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.759975 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.806230 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:56 crc kubenswrapper[5003]: E0104 11:49:56.806410 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.863370 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.863433 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.863454 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.863478 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.863503 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.967499 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.967588 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.967608 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.967676 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:56 crc kubenswrapper[5003]: I0104 11:49:56.967711 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:56Z","lastTransitionTime":"2026-01-04T11:49:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.070804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.070840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.070850 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.070864 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.070872 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.173010 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.173128 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.173152 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.173185 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.173206 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.276351 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.276431 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.276455 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.276490 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.276510 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.379410 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.379508 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.379528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.379550 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.379567 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.488260 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.488419 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.488438 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.488468 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.488486 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.591161 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.591226 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.591244 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.591270 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.591287 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.694450 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.694522 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.694542 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.694573 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.694592 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.798458 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.798528 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.798545 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.798571 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.798590 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.805934 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.805981 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.805946 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:57 crc kubenswrapper[5003]: E0104 11:49:57.806184 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:57 crc kubenswrapper[5003]: E0104 11:49:57.806374 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:57 crc kubenswrapper[5003]: E0104 11:49:57.806558 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.901708 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.901837 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.901912 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.901994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:57 crc kubenswrapper[5003]: I0104 11:49:57.902133 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:57Z","lastTransitionTime":"2026-01-04T11:49:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.005578 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.005658 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.005682 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.005712 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.005731 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.118199 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.118292 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.118318 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.118362 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.118386 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.227798 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.227895 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.227947 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.228075 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.228109 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.332163 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.332236 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.332256 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.332283 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.332307 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.435652 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.435725 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.435742 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.435769 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.435791 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.538746 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.538804 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.538828 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.538854 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.538873 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.642818 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.642924 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.642943 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.642971 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.642992 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.746572 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.746629 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.746648 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.746676 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.746697 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.806503 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:49:58 crc kubenswrapper[5003]: E0104 11:49:58.806729 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.850573 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.850641 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.850664 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.850696 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.850721 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.955563 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.955656 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.955676 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.955705 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:58 crc kubenswrapper[5003]: I0104 11:49:58.955726 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:58Z","lastTransitionTime":"2026-01-04T11:49:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.063154 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.063213 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.063233 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.063258 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.063279 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.167996 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.168100 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.168120 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.168148 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.168169 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.272491 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.272556 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.272577 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.272604 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.272626 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.377372 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.377444 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.377462 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.377520 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.377540 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.480768 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.480851 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.480869 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.480896 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.480924 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.584240 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.584317 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.584339 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.584366 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.584386 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.687994 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.688124 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.688142 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.688173 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.688195 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.793243 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.793294 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.793307 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.793324 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.793337 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.806260 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.806268 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.806271 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:49:59 crc kubenswrapper[5003]: E0104 11:49:59.806399 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:49:59 crc kubenswrapper[5003]: E0104 11:49:59.806539 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:49:59 crc kubenswrapper[5003]: E0104 11:49:59.806665 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.896706 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.896764 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.896780 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.896805 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:49:59 crc kubenswrapper[5003]: I0104 11:49:59.896822 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:49:59Z","lastTransitionTime":"2026-01-04T11:49:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.000411 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.000470 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.000486 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.000511 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.000527 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.105479 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.105548 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.105566 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.105596 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.105616 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.208747 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.208821 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.208840 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.208873 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.208895 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.312223 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.312330 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.312358 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.312395 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.312422 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.415734 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.415787 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.415800 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.415817 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.415829 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.519960 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.520071 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.520092 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.520121 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.520144 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.521723 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.521774 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.521793 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.521813 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.521829 5003 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-04T11:50:00Z","lastTransitionTime":"2026-01-04T11:50:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.594493 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv"] Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.595139 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.599630 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.599706 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.599866 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.615554 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.712713 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9db88b69-ae39-4adf-b372-2e65709d0eee-service-ca\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.712823 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9db88b69-ae39-4adf-b372-2e65709d0eee-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.712960 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: 
\"kubernetes.io/host-path/9db88b69-ae39-4adf-b372-2e65709d0eee-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.713068 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9db88b69-ae39-4adf-b372-2e65709d0eee-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.713166 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9db88b69-ae39-4adf-b372-2e65709d0eee-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.807071 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:00 crc kubenswrapper[5003]: E0104 11:50:00.807342 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.814966 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9db88b69-ae39-4adf-b372-2e65709d0eee-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.815133 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9db88b69-ae39-4adf-b372-2e65709d0eee-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.815209 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9db88b69-ae39-4adf-b372-2e65709d0eee-service-ca\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.815274 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9db88b69-ae39-4adf-b372-2e65709d0eee-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc 
kubenswrapper[5003]: I0104 11:50:00.815354 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9db88b69-ae39-4adf-b372-2e65709d0eee-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.815504 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9db88b69-ae39-4adf-b372-2e65709d0eee-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.815600 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9db88b69-ae39-4adf-b372-2e65709d0eee-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.817188 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9db88b69-ae39-4adf-b372-2e65709d0eee-service-ca\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.824693 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9db88b69-ae39-4adf-b372-2e65709d0eee-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.847366 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9db88b69-ae39-4adf-b372-2e65709d0eee-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-scjnv\" (UID: \"9db88b69-ae39-4adf-b372-2e65709d0eee\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:00 crc kubenswrapper[5003]: I0104 11:50:00.932195 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.556706 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-np5qh_6e5d41d8-142e-4ca3-a20a-f6d338aaddf2/kube-multus/1.log" Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.557895 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-np5qh_6e5d41d8-142e-4ca3-a20a-f6d338aaddf2/kube-multus/0.log" Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.557994 5003 generic.go:334] "Generic (PLEG): container finished" podID="6e5d41d8-142e-4ca3-a20a-f6d338aaddf2" containerID="0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7" exitCode=1 Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.558184 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-np5qh" event={"ID":"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2","Type":"ContainerDied","Data":"0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7"} Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.558283 5003 scope.go:117] "RemoveContainer" containerID="32b666029f9a784bd82cf39653f98d36fe4e109db4011309c3bdda804a02f3e1" Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.558848 5003 scope.go:117] "RemoveContainer" containerID="0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7" Jan 04 11:50:01 crc kubenswrapper[5003]: E0104 11:50:01.559442 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-np5qh_openshift-multus(6e5d41d8-142e-4ca3-a20a-f6d338aaddf2)\"" pod="openshift-multus/multus-np5qh" podUID="6e5d41d8-142e-4ca3-a20a-f6d338aaddf2" Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.560988 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" event={"ID":"9db88b69-ae39-4adf-b372-2e65709d0eee","Type":"ContainerStarted","Data":"844258308e01d15940b35a022186ce54468427118de76bce019e1c964be2cc99"} Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.561080 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" event={"ID":"9db88b69-ae39-4adf-b372-2e65709d0eee","Type":"ContainerStarted","Data":"80383a39037f87d26cd3ebd7398ff9394726334c06434c27771cc170a4221d00"} Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.609729 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-scjnv" podStartSLOduration=93.609705597 podStartE2EDuration="1m33.609705597s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:01.608870085 +0000 UTC m=+117.081899966" watchObservedRunningTime="2026-01-04 11:50:01.609705597 +0000 UTC m=+117.082735448" Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.806283 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:01 crc kubenswrapper[5003]: E0104 11:50:01.806414 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.806631 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:01 crc kubenswrapper[5003]: E0104 11:50:01.806692 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:01 crc kubenswrapper[5003]: I0104 11:50:01.806802 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:01 crc kubenswrapper[5003]: E0104 11:50:01.806848 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:02 crc kubenswrapper[5003]: I0104 11:50:02.566388 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-np5qh_6e5d41d8-142e-4ca3-a20a-f6d338aaddf2/kube-multus/1.log" Jan 04 11:50:02 crc kubenswrapper[5003]: I0104 11:50:02.806343 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:02 crc kubenswrapper[5003]: E0104 11:50:02.806614 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:50:03 crc kubenswrapper[5003]: I0104 11:50:03.806810 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:03 crc kubenswrapper[5003]: I0104 11:50:03.806871 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:03 crc kubenswrapper[5003]: I0104 11:50:03.806810 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:03 crc kubenswrapper[5003]: E0104 11:50:03.807136 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:03 crc kubenswrapper[5003]: E0104 11:50:03.807305 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:03 crc kubenswrapper[5003]: E0104 11:50:03.807455 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:04 crc kubenswrapper[5003]: E0104 11:50:04.766574 5003 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 04 11:50:04 crc kubenswrapper[5003]: I0104 11:50:04.806065 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:04 crc kubenswrapper[5003]: E0104 11:50:04.808127 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:50:04 crc kubenswrapper[5003]: E0104 11:50:04.916380 5003 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 04 11:50:05 crc kubenswrapper[5003]: I0104 11:50:05.806729 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:05 crc kubenswrapper[5003]: I0104 11:50:05.807787 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:05 crc kubenswrapper[5003]: I0104 11:50:05.807740 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:05 crc kubenswrapper[5003]: E0104 11:50:05.808415 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:05 crc kubenswrapper[5003]: E0104 11:50:05.808139 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:05 crc kubenswrapper[5003]: E0104 11:50:05.809298 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:05 crc kubenswrapper[5003]: I0104 11:50:05.809717 5003 scope.go:117] "RemoveContainer" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010" Jan 04 11:50:06 crc kubenswrapper[5003]: I0104 11:50:06.584350 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/3.log" Jan 04 11:50:06 crc kubenswrapper[5003]: I0104 11:50:06.586990 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerStarted","Data":"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623"} Jan 04 11:50:06 crc kubenswrapper[5003]: I0104 11:50:06.587432 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:50:06 crc kubenswrapper[5003]: I0104 11:50:06.619510 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podStartSLOduration=98.619485067 podStartE2EDuration="1m38.619485067s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:06.618715507 +0000 UTC m=+122.091745358" watchObservedRunningTime="2026-01-04 11:50:06.619485067 +0000 UTC m=+122.092514918" Jan 04 11:50:06 crc kubenswrapper[5003]: I0104 11:50:06.784953 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-n2zwh"] Jan 04 11:50:06 crc kubenswrapper[5003]: I0104 11:50:06.785103 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:06 crc kubenswrapper[5003]: E0104 11:50:06.785197 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:50:07 crc kubenswrapper[5003]: I0104 11:50:07.806782 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:07 crc kubenswrapper[5003]: I0104 11:50:07.806833 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:07 crc kubenswrapper[5003]: I0104 11:50:07.806955 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:07 crc kubenswrapper[5003]: E0104 11:50:07.807078 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:07 crc kubenswrapper[5003]: E0104 11:50:07.807219 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:07 crc kubenswrapper[5003]: E0104 11:50:07.807361 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:08 crc kubenswrapper[5003]: I0104 11:50:08.806868 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:08 crc kubenswrapper[5003]: E0104 11:50:08.807196 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:50:09 crc kubenswrapper[5003]: I0104 11:50:09.806813 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:09 crc kubenswrapper[5003]: I0104 11:50:09.806813 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:09 crc kubenswrapper[5003]: I0104 11:50:09.806978 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:09 crc kubenswrapper[5003]: E0104 11:50:09.807453 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:09 crc kubenswrapper[5003]: E0104 11:50:09.807685 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:09 crc kubenswrapper[5003]: E0104 11:50:09.807914 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:09 crc kubenswrapper[5003]: E0104 11:50:09.917918 5003 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 04 11:50:10 crc kubenswrapper[5003]: I0104 11:50:10.806804 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:10 crc kubenswrapper[5003]: E0104 11:50:10.807077 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:50:11 crc kubenswrapper[5003]: I0104 11:50:11.806301 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:11 crc kubenswrapper[5003]: I0104 11:50:11.806301 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:11 crc kubenswrapper[5003]: E0104 11:50:11.806530 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:11 crc kubenswrapper[5003]: E0104 11:50:11.806633 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:11 crc kubenswrapper[5003]: I0104 11:50:11.806806 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:11 crc kubenswrapper[5003]: E0104 11:50:11.807089 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:12 crc kubenswrapper[5003]: I0104 11:50:12.806361 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:12 crc kubenswrapper[5003]: E0104 11:50:12.806590 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:50:13 crc kubenswrapper[5003]: I0104 11:50:13.805926 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:13 crc kubenswrapper[5003]: I0104 11:50:13.805936 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:13 crc kubenswrapper[5003]: I0104 11:50:13.805964 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:13 crc kubenswrapper[5003]: E0104 11:50:13.806204 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:13 crc kubenswrapper[5003]: E0104 11:50:13.806409 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 04 11:50:13 crc kubenswrapper[5003]: E0104 11:50:13.806510 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:14 crc kubenswrapper[5003]: I0104 11:50:14.806608 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:14 crc kubenswrapper[5003]: E0104 11:50:14.808816 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e" Jan 04 11:50:14 crc kubenswrapper[5003]: I0104 11:50:14.809108 5003 scope.go:117] "RemoveContainer" containerID="0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7" Jan 04 11:50:14 crc kubenswrapper[5003]: E0104 11:50:14.919424 5003 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 04 11:50:15 crc kubenswrapper[5003]: I0104 11:50:15.632497 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-np5qh_6e5d41d8-142e-4ca3-a20a-f6d338aaddf2/kube-multus/1.log" Jan 04 11:50:15 crc kubenswrapper[5003]: I0104 11:50:15.632556 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-np5qh" event={"ID":"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2","Type":"ContainerStarted","Data":"db992e1ff8ee746d747f10a8b6c8a30b540e1efe5581e111fa25a5cd5467774d"} Jan 04 11:50:15 crc kubenswrapper[5003]: I0104 11:50:15.806172 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:15 crc kubenswrapper[5003]: I0104 11:50:15.806275 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:15 crc kubenswrapper[5003]: I0104 11:50:15.806297 5003 util.go:30] "No sandbox for pod can be found. 
Jan 04 11:50:15 crc kubenswrapper[5003]: E0104 11:50:15.806482 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:50:15 crc kubenswrapper[5003]: E0104 11:50:15.806693 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:50:15 crc kubenswrapper[5003]: E0104 11:50:15.806899 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:50:16 crc kubenswrapper[5003]: I0104 11:50:16.806096 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:50:16 crc kubenswrapper[5003]: E0104 11:50:16.806864 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
Jan 04 11:50:17 crc kubenswrapper[5003]: I0104 11:50:17.805662 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:50:17 crc kubenswrapper[5003]: I0104 11:50:17.805759 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:50:17 crc kubenswrapper[5003]: E0104 11:50:17.805890 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 04 11:50:17 crc kubenswrapper[5003]: I0104 11:50:17.805757 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:50:17 crc kubenswrapper[5003]: E0104 11:50:17.806143 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:50:17 crc kubenswrapper[5003]: E0104 11:50:17.806229 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 04 11:50:18 crc kubenswrapper[5003]: I0104 11:50:18.806608 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh"
Jan 04 11:50:18 crc kubenswrapper[5003]: E0104 11:50:18.806874 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2zwh" podUID="cd6de6ec-2a7c-4842-9d8a-ba4032acb50e"
Jan 04 11:50:19 crc kubenswrapper[5003]: I0104 11:50:19.806128 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 04 11:50:19 crc kubenswrapper[5003]: I0104 11:50:19.806262 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 04 11:50:19 crc kubenswrapper[5003]: I0104 11:50:19.806128 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 04 11:50:19 crc kubenswrapper[5003]: E0104 11:50:19.806391 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 04 11:50:19 crc kubenswrapper[5003]: E0104 11:50:19.806518 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 04 11:50:19 crc kubenswrapper[5003]: E0104 11:50:19.806659 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.806407 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.809876 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.810258 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.818863 5003 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.893628 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tlh5b"] Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.894421 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dslqq"] Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.894766 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j"] Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.895134 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.895218 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.895135 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.896081 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tk9gz"] Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.896830 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.904567 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.904591 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.907168 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5"] Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.908145 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.909345 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9"] Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.909915 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.910376 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh"] Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.911036 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.920453 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.921316 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-n7vrk"] Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.938517 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.943401 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.944271 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.944520 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.944571 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.944955 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.944300 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.945215 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.945365 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.945160 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.945523 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.945542 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.945570 5003 
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.951604 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.952335 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.952808 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.952834 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.953195 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.953841 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-dkgwj"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.953918 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954072 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954094 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954124 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954271 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954352 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954422 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954435 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-dkgwj"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954493 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954617 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954629 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954618 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.953851 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954779 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954828 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954888 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954904 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954839 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.954983 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955071 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955122 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955281 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955296 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955382 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955393 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955426 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955385 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955464 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955518 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955526 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955557 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955604 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955637 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955661 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955724 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955812 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.955918 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.956102 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.956681 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.957342 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.958135 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.958204 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.958248 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8qsxn"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.958255 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.959089 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.959538 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.960061 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.961166 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.962418 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.963289 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.962443 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.963847 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.964692 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.965143 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.965190 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.965887 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.966460 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.966861 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.966937 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.967545 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.967832 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-cw9t7"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.968526 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-cw9t7"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.970038 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-86rlm"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.971093 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.971448 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.971469 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.971573 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.971581 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.971598 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.971907 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.972043 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.972121 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.972378 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.973081 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.973833 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.974702 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.993371 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.996939 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.998273 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd"]
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.998723 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Jan 04 11:50:20 crc kubenswrapper[5003]: I0104 11:50:20.999160 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.001595 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.002268 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.002500 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.002304 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.010760 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.010838 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.011223 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.011300 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.011439 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.011480 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.011623 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.011638 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.011723 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.011860 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.011898 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.011984 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.012116 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.012306 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.012330 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.012331 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.012693 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.012748 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.012939 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013209 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f69f13d6-7550-471c-b84e-e62b06c17c9b-audit-policies\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013248 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7044cdc7-b7c2-454e-9460-8f6b783f85eb-config\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013287 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8006abe7-31d8-489d-9005-d96d40bb9ba5-serving-cert\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013306 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f69f13d6-7550-471c-b84e-e62b06c17c9b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013330 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp2h9\" (UniqueName: \"kubernetes.io/projected/f69f13d6-7550-471c-b84e-e62b06c17c9b-kube-api-access-rp2h9\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013347 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7044cdc7-b7c2-454e-9460-8f6b783f85eb-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013371 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f69f13d6-7550-471c-b84e-e62b06c17c9b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013391 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfjlz\" (UniqueName: \"kubernetes.io/projected/19e87eb5-dc5b-4012-b862-22c8083c247b-kube-api-access-dfjlz\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013421 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/19e87eb5-dc5b-4012-b862-22c8083c247b-etcd-client\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013446 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef5fdbe3-490c-476b-af83-a810d6e5a888-config\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013472 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-config\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013497 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-machine-approver-tls\") pod \"machine-approver-56656f9798-fpmfh\" (UID: \"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013533 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19e87eb5-dc5b-4012-b862-22c8083c247b-serving-cert\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013551 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-auth-proxy-config\") pod \"machine-approver-56656f9798-fpmfh\" (UID: \"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013582 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-config\") pod \"machine-approver-56656f9798-fpmfh\" (UID: \"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013601 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9f2bf47-0b56-44cd-ba31-3e9a5320186d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-r6s9j\" (UID: \"b9f2bf47-0b56-44cd-ba31-3e9a5320186d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013625 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-etcd-serving-ca\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013650 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-audit\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013667 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013689 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f69f13d6-7550-471c-b84e-e62b06c17c9b-serving-cert\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013706 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ef5fdbe3-490c-476b-af83-a810d6e5a888-images\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013724 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5xtf\" (UniqueName: \"kubernetes.io/projected/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-kube-api-access-r5xtf\") pod \"machine-approver-56656f9798-fpmfh\" (UID: \"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013751 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-client-ca\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013768 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mvwh\" (UniqueName: \"kubernetes.io/projected/7044cdc7-b7c2-454e-9460-8f6b783f85eb-kube-api-access-2mvwh\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013794 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ef5fdbe3-490c-476b-af83-a810d6e5a888-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013814 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-config\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013833 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/19e87eb5-dc5b-4012-b862-22c8083c247b-encryption-config\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013862 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013881 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40dada5c-5a67-4362-b9fb-e49a7fc32307-serving-cert\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013902 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-config\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz"
Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013935 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/19e87eb5-dc5b-4012-b862-22c8083c247b-node-pullsecrets\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b"
\"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/19e87eb5-dc5b-4012-b862-22c8083c247b-node-pullsecrets\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013952 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7044cdc7-b7c2-454e-9460-8f6b783f85eb-serving-cert\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013975 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-client-ca\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.013994 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7044cdc7-b7c2-454e-9460-8f6b783f85eb-service-ca-bundle\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014038 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f69f13d6-7550-471c-b84e-e62b06c17c9b-encryption-config\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014059 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-image-import-ca\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014080 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9f2bf47-0b56-44cd-ba31-3e9a5320186d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-r6s9j\" (UID: \"b9f2bf47-0b56-44cd-ba31-3e9a5320186d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014099 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55q26\" (UniqueName: \"kubernetes.io/projected/8006abe7-31d8-489d-9005-d96d40bb9ba5-kube-api-access-55q26\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014115 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghpkx\" (UniqueName: 
\"kubernetes.io/projected/ef5fdbe3-490c-476b-af83-a810d6e5a888-kube-api-access-ghpkx\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014134 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f69f13d6-7550-471c-b84e-e62b06c17c9b-etcd-client\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014154 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f69f13d6-7550-471c-b84e-e62b06c17c9b-audit-dir\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014175 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/19e87eb5-dc5b-4012-b862-22c8083c247b-audit-dir\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014198 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2w5q\" (UniqueName: \"kubernetes.io/projected/40dada5c-5a67-4362-b9fb-e49a7fc32307-kube-api-access-g2w5q\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014216 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwqb7\" (UniqueName: \"kubernetes.io/projected/b9f2bf47-0b56-44cd-ba31-3e9a5320186d-kube-api-access-wwqb7\") pod \"openshift-apiserver-operator-796bbdcf4f-r6s9j\" (UID: \"b9f2bf47-0b56-44cd-ba31-3e9a5320186d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014342 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.014886 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.015527 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.017153 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.017855 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-88l2c"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.018318 5003 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-operator"/"trusted-ca" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.018478 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.018666 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.018790 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.018804 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-2jt99"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.018998 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.019736 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.020841 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.021636 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.026212 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.027272 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.028187 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.029471 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.030319 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.031829 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.034680 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.035495 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x47v9"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.036226 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.036630 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.036906 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.037607 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-pgx7w"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.038205 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pd2px"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.038818 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.040224 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.040311 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-pgx7w" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.041067 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.041407 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.046268 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-m6xcl"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.047351 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.049397 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.050354 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.050837 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d2kjq"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.052914 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.056879 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.057664 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.058618 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.059720 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.061257 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.062328 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-n7vrk"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.064088 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.064753 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dslqq"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.066354 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.068290 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8qsxn"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.070422 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.073127 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.077994 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.079743 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tk9gz"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.081415 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.083319 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tlh5b"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.098746 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.101006 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-86rlm"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.114789 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115643 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115692 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f69f13d6-7550-471c-b84e-e62b06c17c9b-audit-policies\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115720 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skx67\" (UniqueName: \"kubernetes.io/projected/87bcf788-c637-4ed6-a5da-650979a40203-kube-api-access-skx67\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115742 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-proxy-tls\") pod \"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115768 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115791 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8006abe7-31d8-489d-9005-d96d40bb9ba5-serving-cert\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115811 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tq9v\" (UniqueName: \"kubernetes.io/projected/8d27792d-685d-4a24-96f6-e38710e1cd6c-kube-api-access-2tq9v\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115830 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqf8n\" (UniqueName: \"kubernetes.io/projected/664dcbda-0324-4957-84e5-309cbd624afc-kube-api-access-bqf8n\") pod \"dns-operator-744455d44c-8qsxn\" (UID: \"664dcbda-0324-4957-84e5-309cbd624afc\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115849 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/831746c3-7123-4189-a551-7f7852402807-serving-cert\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115878 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f69f13d6-7550-471c-b84e-e62b06c17c9b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115922 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp2h9\" (UniqueName: \"kubernetes.io/projected/f69f13d6-7550-471c-b84e-e62b06c17c9b-kube-api-access-rp2h9\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115944 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7044cdc7-b7c2-454e-9460-8f6b783f85eb-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.115994 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-496f7\" (UniqueName: \"kubernetes.io/projected/b5684173-3dc4-40bb-8424-096e087c1afd-kube-api-access-496f7\") pod \"machine-config-controller-84d6567774-5w6zp\" (UID: \"b5684173-3dc4-40bb-8424-096e087c1afd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116036 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f69f13d6-7550-471c-b84e-e62b06c17c9b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116064 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfjlz\" (UniqueName: \"kubernetes.io/projected/19e87eb5-dc5b-4012-b862-22c8083c247b-kube-api-access-dfjlz\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116089 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/87bcf788-c637-4ed6-a5da-650979a40203-metrics-certs\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116116 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-psjfm\" (UID: 
\"e3d1074f-cb7e-4fa5-9477-e6c1835b6488\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116145 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/508261e1-05b6-486d-9724-768d8729d7dd-audit-dir\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116168 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9f843c7a-a6a4-4d2d-976f-0a955e8a2777-profile-collector-cert\") pod \"catalog-operator-68c6474976-t6nrd\" (UID: \"9f843c7a-a6a4-4d2d-976f-0a955e8a2777\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116189 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/19e87eb5-dc5b-4012-b862-22c8083c247b-etcd-client\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116231 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9f843c7a-a6a4-4d2d-976f-0a955e8a2777-srv-cert\") pod \"catalog-operator-68c6474976-t6nrd\" (UID: \"9f843c7a-a6a4-4d2d-976f-0a955e8a2777\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116251 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116271 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-config\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116291 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a5f82de9-292f-47ab-9590-00f63a73a25d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116310 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9ab7be4d-a4d6-4857-ba26-2ade2b6b3917-signing-key\") pod \"service-ca-9c57cc56f-x47v9\" (UID: \"9ab7be4d-a4d6-4857-ba26-2ade2b6b3917\") " pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 
11:50:21.116328 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116353 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-oauth-serving-cert\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116373 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqgp5\" (UniqueName: \"kubernetes.io/projected/a5f82de9-292f-47ab-9590-00f63a73a25d-kube-api-access-lqgp5\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116393 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bv66\" (UniqueName: \"kubernetes.io/projected/508261e1-05b6-486d-9724-768d8729d7dd-kube-api-access-8bv66\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116418 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-etcd-serving-ca\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116437 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9f2bf47-0b56-44cd-ba31-3e9a5320186d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-r6s9j\" (UID: \"b9f2bf47-0b56-44cd-ba31-3e9a5320186d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116456 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8d27792d-685d-4a24-96f6-e38710e1cd6c-webhook-cert\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116476 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs2xl\" (UniqueName: \"kubernetes.io/projected/e099f402-eb6b-4a1c-b5f6-f713fff68945-kube-api-access-zs2xl\") pod \"multus-admission-controller-857f4d67dd-88l2c\" (UID: \"e099f402-eb6b-4a1c-b5f6-f713fff68945\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116483 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f69f13d6-7550-471c-b84e-e62b06c17c9b-audit-policies\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116519 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/831746c3-7123-4189-a551-7f7852402807-trusted-ca\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116545 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2g644\" (UID: \"dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116564 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-serving-cert\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116584 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7865114-eb86-4994-b91d-3c39f9cee6a5-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6smzv\" (UID: \"a7865114-eb86-4994-b91d-3c39f9cee6a5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116604 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7865114-eb86-4994-b91d-3c39f9cee6a5-config\") pod \"kube-controller-manager-operator-78b949d7b-6smzv\" (UID: \"a7865114-eb86-4994-b91d-3c39f9cee6a5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116623 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f69f13d6-7550-471c-b84e-e62b06c17c9b-serving-cert\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116644 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7dccbf4-f7cc-4306-b244-605f2849a805-config\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116665 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5xtf\" (UniqueName: 
\"kubernetes.io/projected/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-kube-api-access-r5xtf\") pod \"machine-approver-56656f9798-fpmfh\" (UID: \"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116688 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ef5fdbe3-490c-476b-af83-a810d6e5a888-images\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116708 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-config\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116726 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-client-ca\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116743 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8d27792d-685d-4a24-96f6-e38710e1cd6c-apiservice-cert\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116764 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ef5fdbe3-490c-476b-af83-a810d6e5a888-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116783 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40dada5c-5a67-4362-b9fb-e49a7fc32307-serving-cert\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116805 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/831746c3-7123-4189-a551-7f7852402807-config\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116825 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b5684173-3dc4-40bb-8424-096e087c1afd-proxy-tls\") pod \"machine-config-controller-84d6567774-5w6zp\" 
(UID: \"b5684173-3dc4-40bb-8424-096e087c1afd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116845 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thswx\" (UniqueName: \"kubernetes.io/projected/56927c4a-a76e-4c0e-b2d4-2ff8cf31d8eb-kube-api-access-thswx\") pod \"migrator-59844c95c7-b5flr\" (UID: \"56927c4a-a76e-4c0e-b2d4-2ff8cf31d8eb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116864 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116894 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116914 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8d27792d-685d-4a24-96f6-e38710e1cd6c-tmpfs\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116916 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f69f13d6-7550-471c-b84e-e62b06c17c9b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116933 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhlcj\" (UniqueName: \"kubernetes.io/projected/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-kube-api-access-fhlcj\") pod \"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.116975 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjlg4\" (UniqueName: \"kubernetes.io/projected/c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a-kube-api-access-sjlg4\") pod \"control-plane-machine-set-operator-78cbb6b69f-zgmzz\" (UID: \"c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.117902 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-etcd-serving-ca\") pod \"apiserver-76f77b778f-tlh5b\" (UID: 
\"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.118038 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7044cdc7-b7c2-454e-9460-8f6b783f85eb-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.118464 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.118719 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ef5fdbe3-490c-476b-af83-a810d6e5a888-images\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.118976 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-client-ca\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.119070 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-config\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.119129 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f69f13d6-7550-471c-b84e-e62b06c17c9b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.119248 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.119878 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120363 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-config\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120259 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6"] Jan 04 
11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120415 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45x9h\" (UniqueName: \"kubernetes.io/projected/1886cfcf-db6d-49b2-8f0a-4637996373db-kube-api-access-45x9h\") pod \"collect-profiles-29458785-q4d9v\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120528 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/19e87eb5-dc5b-4012-b862-22c8083c247b-node-pullsecrets\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120575 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-console-config\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120609 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120659 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62eb9818-98b0-40f2-9629-78923c5b112f-serving-cert\") pod \"openshift-config-operator-7777fb866f-cd6dv\" (UID: \"62eb9818-98b0-40f2-9629-78923c5b112f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120686 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/62eb9818-98b0-40f2-9629-78923c5b112f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cd6dv\" (UID: \"62eb9818-98b0-40f2-9629-78923c5b112f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120708 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2d6j\" (UniqueName: \"kubernetes.io/projected/cec0c8d3-e486-4b4c-8f49-9a04926b7f05-kube-api-access-z2d6j\") pod \"package-server-manager-789f6589d5-qgwds\" (UID: \"cec0c8d3-e486-4b4c-8f49-9a04926b7f05\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120739 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-client-ca\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 
11:50:21.120760 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7044cdc7-b7c2-454e-9460-8f6b783f85eb-service-ca-bundle\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120780 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b46t4\" (UniqueName: \"kubernetes.io/projected/8e5b9cf7-2752-4ac4-804a-7b88df12b435-kube-api-access-b46t4\") pod \"service-ca-operator-777779d784-4m2tr\" (UID: \"8e5b9cf7-2752-4ac4-804a-7b88df12b435\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.120800 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69drk\" (UniqueName: \"kubernetes.io/projected/dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce-kube-api-access-69drk\") pod \"cluster-samples-operator-665b6dd947-2g644\" (UID: \"dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.121834 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnv27\" (UniqueName: \"kubernetes.io/projected/9ab7be4d-a4d6-4857-ba26-2ade2b6b3917-kube-api-access-dnv27\") pod \"service-ca-9c57cc56f-x47v9\" (UID: \"9ab7be4d-a4d6-4857-ba26-2ade2b6b3917\") " pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.121879 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-psjfm\" (UID: \"e3d1074f-cb7e-4fa5-9477-e6c1835b6488\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.121904 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a5f82de9-292f-47ab-9590-00f63a73a25d-metrics-tls\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.121930 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/27951f81-2e39-4d40-a0f9-cd6f66265a41-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bsmnf\" (UID: \"27951f81-2e39-4d40-a0f9-cd6f66265a41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.121955 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5681e3b7-2931-4ebe-a12c-872ad9ab9906-srv-cert\") pod \"olm-operator-6b444d44fb-nxp6d\" (UID: \"5681e3b7-2931-4ebe-a12c-872ad9ab9906\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:21 crc 
kubenswrapper[5003]: I0104 11:50:21.122086 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pks9v\" (UniqueName: \"kubernetes.io/projected/62eb9818-98b0-40f2-9629-78923c5b112f-kube-api-access-pks9v\") pod \"openshift-config-operator-7777fb866f-cd6dv\" (UID: \"62eb9818-98b0-40f2-9629-78923c5b112f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122119 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghpkx\" (UniqueName: \"kubernetes.io/projected/ef5fdbe3-490c-476b-af83-a810d6e5a888-kube-api-access-ghpkx\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122140 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f69f13d6-7550-471c-b84e-e62b06c17c9b-etcd-client\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122162 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/19e87eb5-dc5b-4012-b862-22c8083c247b-audit-dir\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122214 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/87bcf788-c637-4ed6-a5da-650979a40203-default-certificate\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122240 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-audit-policies\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122243 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122261 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/759c4131-a27e-4a35-b609-6a431eff05a6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4k548\" (UID: \"759c4131-a27e-4a35-b609-6a431eff05a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122302 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7044cdc7-b7c2-454e-9460-8f6b783f85eb-config\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122324 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-images\") pod \"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122344 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87sxh\" (UniqueName: \"kubernetes.io/projected/759c4131-a27e-4a35-b609-6a431eff05a6-kube-api-access-87sxh\") pod \"openshift-controller-manager-operator-756b6f6bc6-4k548\" (UID: \"759c4131-a27e-4a35-b609-6a431eff05a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122361 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7865114-eb86-4994-b91d-3c39f9cee6a5-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6smzv\" (UID: \"a7865114-eb86-4994-b91d-3c39f9cee6a5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122383 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/cec0c8d3-e486-4b4c-8f49-9a04926b7f05-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qgwds\" (UID: \"cec0c8d3-e486-4b4c-8f49-9a04926b7f05\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122402 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d7dccbf4-f7cc-4306-b244-605f2849a805-etcd-service-ca\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122498 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122528 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27951f81-2e39-4d40-a0f9-cd6f66265a41-config\") pod \"kube-apiserver-operator-766d6c64bb-bsmnf\" (UID: \"27951f81-2e39-4d40-a0f9-cd6f66265a41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122548 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-oauth-config\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122567 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crf55\" (UniqueName: \"kubernetes.io/projected/9f843c7a-a6a4-4d2d-976f-0a955e8a2777-kube-api-access-crf55\") pod \"catalog-operator-68c6474976-t6nrd\" (UID: \"9f843c7a-a6a4-4d2d-976f-0a955e8a2777\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122586 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5681e3b7-2931-4ebe-a12c-872ad9ab9906-profile-collector-cert\") pod \"olm-operator-6b444d44fb-nxp6d\" (UID: \"5681e3b7-2931-4ebe-a12c-872ad9ab9906\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122715 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e5b9cf7-2752-4ac4-804a-7b88df12b435-serving-cert\") pod \"service-ca-operator-777779d784-4m2tr\" (UID: \"8e5b9cf7-2752-4ac4-804a-7b88df12b435\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122735 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122758 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef5fdbe3-490c-476b-af83-a810d6e5a888-config\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122775 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d7dccbf4-f7cc-4306-b244-605f2849a805-etcd-ca\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122796 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/87bcf788-c637-4ed6-a5da-650979a40203-stats-auth\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122816 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/b5684173-3dc4-40bb-8424-096e087c1afd-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5w6zp\" (UID: \"b5684173-3dc4-40bb-8424-096e087c1afd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122837 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1886cfcf-db6d-49b2-8f0a-4637996373db-secret-volume\") pod \"collect-profiles-29458785-q4d9v\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122859 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-machine-approver-tls\") pod \"machine-approver-56656f9798-fpmfh\" (UID: \"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122890 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19e87eb5-dc5b-4012-b862-22c8083c247b-serving-cert\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122908 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27951f81-2e39-4d40-a0f9-cd6f66265a41-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bsmnf\" (UID: \"27951f81-2e39-4d40-a0f9-cd6f66265a41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122930 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7dccbf4-f7cc-4306-b244-605f2849a805-serving-cert\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122956 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-auth-proxy-config\") pod \"machine-approver-56656f9798-fpmfh\" (UID: \"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122973 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hq88c\" (UniqueName: \"kubernetes.io/projected/b43f39ef-60cd-44be-8061-715fbf71a36b-kube-api-access-hq88c\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.122993 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-config\") pod \"machine-approver-56656f9798-fpmfh\" (UID: 
\"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123053 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-trusted-ca-bundle\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123077 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123096 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zgmzz\" (UID: \"c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123121 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-audit\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123141 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123161 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-psjfm\" (UID: \"e3d1074f-cb7e-4fa5-9477-e6c1835b6488\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123181 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj5cl\" (UniqueName: \"kubernetes.io/projected/d7dccbf4-f7cc-4306-b244-605f2849a805-kube-api-access-xj5cl\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123208 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mvwh\" (UniqueName: \"kubernetes.io/projected/7044cdc7-b7c2-454e-9460-8f6b783f85eb-kube-api-access-2mvwh\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: 
\"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123224 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/759c4131-a27e-4a35-b609-6a431eff05a6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4k548\" (UID: \"759c4131-a27e-4a35-b609-6a431eff05a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123244 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/19e87eb5-dc5b-4012-b862-22c8083c247b-encryption-config\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123241 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9f2bf47-0b56-44cd-ba31-3e9a5320186d-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-r6s9j\" (UID: \"b9f2bf47-0b56-44cd-ba31-3e9a5320186d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123265 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-config\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123284 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d7dccbf4-f7cc-4306-b244-605f2849a805-etcd-client\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123313 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-auth-proxy-config\") pod \"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123333 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123350 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7044cdc7-b7c2-454e-9460-8f6b783f85eb-serving-cert\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123366 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1886cfcf-db6d-49b2-8f0a-4637996373db-config-volume\") pod \"collect-profiles-29458785-q4d9v\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123383 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6wrk\" (UniqueName: \"kubernetes.io/projected/831746c3-7123-4189-a551-7f7852402807-kube-api-access-x6wrk\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123399 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a5f82de9-292f-47ab-9590-00f63a73a25d-trusted-ca\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123418 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6vq7\" (UniqueName: \"kubernetes.io/projected/5681e3b7-2931-4ebe-a12c-872ad9ab9906-kube-api-access-d6vq7\") pod \"olm-operator-6b444d44fb-nxp6d\" (UID: \"5681e3b7-2931-4ebe-a12c-872ad9ab9906\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123436 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-service-ca\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123575 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e5b9cf7-2752-4ac4-804a-7b88df12b435-config\") pod \"service-ca-operator-777779d784-4m2tr\" (UID: \"8e5b9cf7-2752-4ac4-804a-7b88df12b435\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123594 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9ab7be4d-a4d6-4857-ba26-2ade2b6b3917-signing-cabundle\") pod \"service-ca-9c57cc56f-x47v9\" (UID: \"9ab7be4d-a4d6-4857-ba26-2ade2b6b3917\") " pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123620 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9f2bf47-0b56-44cd-ba31-3e9a5320186d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-r6s9j\" (UID: \"b9f2bf47-0b56-44cd-ba31-3e9a5320186d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" Jan 04 11:50:21 crc 
kubenswrapper[5003]: I0104 11:50:21.123642 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123665 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f69f13d6-7550-471c-b84e-e62b06c17c9b-encryption-config\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123682 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-image-import-ca\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123702 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55q26\" (UniqueName: \"kubernetes.io/projected/8006abe7-31d8-489d-9005-d96d40bb9ba5-kube-api-access-55q26\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123720 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f69f13d6-7550-471c-b84e-e62b06c17c9b-audit-dir\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123740 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jjx2\" (UniqueName: \"kubernetes.io/projected/92ada4c8-acb9-4740-85dc-815cd8a3b028-kube-api-access-5jjx2\") pod \"downloads-7954f5f757-pgx7w\" (UID: \"92ada4c8-acb9-4740-85dc-815cd8a3b028\") " pod="openshift-console/downloads-7954f5f757-pgx7w" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123758 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/664dcbda-0324-4957-84e5-309cbd624afc-metrics-tls\") pod \"dns-operator-744455d44c-8qsxn\" (UID: \"664dcbda-0324-4957-84e5-309cbd624afc\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123778 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e099f402-eb6b-4a1c-b5f6-f713fff68945-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-88l2c\" (UID: \"e099f402-eb6b-4a1c-b5f6-f713fff68945\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123800 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2w5q\" (UniqueName: 
\"kubernetes.io/projected/40dada5c-5a67-4362-b9fb-e49a7fc32307-kube-api-access-g2w5q\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123819 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwqb7\" (UniqueName: \"kubernetes.io/projected/b9f2bf47-0b56-44cd-ba31-3e9a5320186d-kube-api-access-wwqb7\") pod \"openshift-apiserver-operator-796bbdcf4f-r6s9j\" (UID: \"b9f2bf47-0b56-44cd-ba31-3e9a5320186d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123835 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87bcf788-c637-4ed6-a5da-650979a40203-service-ca-bundle\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.123937 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/19e87eb5-dc5b-4012-b862-22c8083c247b-node-pullsecrets\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.124090 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40dada5c-5a67-4362-b9fb-e49a7fc32307-serving-cert\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.124261 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-config\") pod \"machine-approver-56656f9798-fpmfh\" (UID: \"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.124658 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-auth-proxy-config\") pod \"machine-approver-56656f9798-fpmfh\" (UID: \"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.124722 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/19e87eb5-dc5b-4012-b862-22c8083c247b-audit-dir\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.124779 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7044cdc7-b7c2-454e-9460-8f6b783f85eb-service-ca-bundle\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.125774 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9f2bf47-0b56-44cd-ba31-3e9a5320186d-config\") pod \"openshift-apiserver-operator-796bbdcf4f-r6s9j\" (UID: \"b9f2bf47-0b56-44cd-ba31-3e9a5320186d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.126057 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8006abe7-31d8-489d-9005-d96d40bb9ba5-serving-cert\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.126194 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-client-ca\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.126471 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f69f13d6-7550-471c-b84e-e62b06c17c9b-etcd-client\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.126526 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f69f13d6-7550-471c-b84e-e62b06c17c9b-audit-dir\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.126958 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7044cdc7-b7c2-454e-9460-8f6b783f85eb-config\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.126997 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-audit\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.127487 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-image-import-ca\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.127515 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f69f13d6-7550-471c-b84e-e62b06c17c9b-serving-cert\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: 
\"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.127694 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef5fdbe3-490c-476b-af83-a810d6e5a888-config\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.127721 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/19e87eb5-dc5b-4012-b862-22c8083c247b-encryption-config\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.128612 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19e87eb5-dc5b-4012-b862-22c8083c247b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.128958 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/19e87eb5-dc5b-4012-b862-22c8083c247b-etcd-client\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.129315 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.129968 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-machine-approver-tls\") pod \"machine-approver-56656f9798-fpmfh\" (UID: \"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.130523 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7044cdc7-b7c2-454e-9460-8f6b783f85eb-serving-cert\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.130682 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-config\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.131135 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-bjp7f"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.131817 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19e87eb5-dc5b-4012-b862-22c8083c247b-serving-cert\") pod \"apiserver-76f77b778f-tlh5b\" (UID: 
\"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.133466 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-cw9t7"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.133651 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.135516 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.138949 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.138963 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.143284 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.144673 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f69f13d6-7550-471c-b84e-e62b06c17c9b-encryption-config\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.144780 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ef5fdbe3-490c-476b-af83-a810d6e5a888-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.150374 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.152040 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-pgx7w"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.154755 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.157178 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d2kjq"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.157768 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.159087 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x47v9"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.160319 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.161638 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.162718 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.163808 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.165610 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-88l2c"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.166785 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-bjp7f"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.167862 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.168955 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-m6xcl"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.170056 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2jt99"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.171511 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pd2px"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.173279 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-87wfq"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.174734 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-r7kd8"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.174920 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.175860 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-87wfq"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.176143 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-r7kd8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.176899 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-r7kd8"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.179605 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-vfq5w"] Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.180285 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.181135 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vfq5w" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.197945 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.217568 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226066 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27951f81-2e39-4d40-a0f9-cd6f66265a41-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bsmnf\" (UID: \"27951f81-2e39-4d40-a0f9-cd6f66265a41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226194 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7dccbf4-f7cc-4306-b244-605f2849a805-serving-cert\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226253 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-trusted-ca-bundle\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226286 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hq88c\" (UniqueName: \"kubernetes.io/projected/b43f39ef-60cd-44be-8061-715fbf71a36b-kube-api-access-hq88c\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226316 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226361 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zgmzz\" (UID: \"c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226393 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-psjfm\" (UID: \"e3d1074f-cb7e-4fa5-9477-e6c1835b6488\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 
11:50:21.226418 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj5cl\" (UniqueName: \"kubernetes.io/projected/d7dccbf4-f7cc-4306-b244-605f2849a805-kube-api-access-xj5cl\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226455 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/759c4131-a27e-4a35-b609-6a431eff05a6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4k548\" (UID: \"759c4131-a27e-4a35-b609-6a431eff05a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226501 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-auth-proxy-config\") pod \"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226541 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d7dccbf4-f7cc-4306-b244-605f2849a805-etcd-client\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226584 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1886cfcf-db6d-49b2-8f0a-4637996373db-config-volume\") pod \"collect-profiles-29458785-q4d9v\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226613 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6wrk\" (UniqueName: \"kubernetes.io/projected/831746c3-7123-4189-a551-7f7852402807-kube-api-access-x6wrk\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226639 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226674 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a5f82de9-292f-47ab-9590-00f63a73a25d-trusted-ca\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226706 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6vq7\" 
(UniqueName: \"kubernetes.io/projected/5681e3b7-2931-4ebe-a12c-872ad9ab9906-kube-api-access-d6vq7\") pod \"olm-operator-6b444d44fb-nxp6d\" (UID: \"5681e3b7-2931-4ebe-a12c-872ad9ab9906\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226738 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-service-ca\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226766 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e5b9cf7-2752-4ac4-804a-7b88df12b435-config\") pod \"service-ca-operator-777779d784-4m2tr\" (UID: \"8e5b9cf7-2752-4ac4-804a-7b88df12b435\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226797 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9ab7be4d-a4d6-4857-ba26-2ade2b6b3917-signing-cabundle\") pod \"service-ca-9c57cc56f-x47v9\" (UID: \"9ab7be4d-a4d6-4857-ba26-2ade2b6b3917\") " pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226829 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226868 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jjx2\" (UniqueName: \"kubernetes.io/projected/92ada4c8-acb9-4740-85dc-815cd8a3b028-kube-api-access-5jjx2\") pod \"downloads-7954f5f757-pgx7w\" (UID: \"92ada4c8-acb9-4740-85dc-815cd8a3b028\") " pod="openshift-console/downloads-7954f5f757-pgx7w" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226920 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87bcf788-c637-4ed6-a5da-650979a40203-service-ca-bundle\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226952 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/664dcbda-0324-4957-84e5-309cbd624afc-metrics-tls\") pod \"dns-operator-744455d44c-8qsxn\" (UID: \"664dcbda-0324-4957-84e5-309cbd624afc\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.226978 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e099f402-eb6b-4a1c-b5f6-f713fff68945-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-88l2c\" (UID: \"e099f402-eb6b-4a1c-b5f6-f713fff68945\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" Jan 04 
11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227034 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227063 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skx67\" (UniqueName: \"kubernetes.io/projected/87bcf788-c637-4ed6-a5da-650979a40203-kube-api-access-skx67\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227092 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-proxy-tls\") pod \"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227118 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227154 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tq9v\" (UniqueName: \"kubernetes.io/projected/8d27792d-685d-4a24-96f6-e38710e1cd6c-kube-api-access-2tq9v\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227184 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqf8n\" (UniqueName: \"kubernetes.io/projected/664dcbda-0324-4957-84e5-309cbd624afc-kube-api-access-bqf8n\") pod \"dns-operator-744455d44c-8qsxn\" (UID: \"664dcbda-0324-4957-84e5-309cbd624afc\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227208 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/831746c3-7123-4189-a551-7f7852402807-serving-cert\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227235 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-496f7\" (UniqueName: \"kubernetes.io/projected/b5684173-3dc4-40bb-8424-096e087c1afd-kube-api-access-496f7\") pod \"machine-config-controller-84d6567774-5w6zp\" (UID: \"b5684173-3dc4-40bb-8424-096e087c1afd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227272 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/87bcf788-c637-4ed6-a5da-650979a40203-metrics-certs\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227299 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-psjfm\" (UID: \"e3d1074f-cb7e-4fa5-9477-e6c1835b6488\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227330 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/508261e1-05b6-486d-9724-768d8729d7dd-audit-dir\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227358 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9f843c7a-a6a4-4d2d-976f-0a955e8a2777-profile-collector-cert\") pod \"catalog-operator-68c6474976-t6nrd\" (UID: \"9f843c7a-a6a4-4d2d-976f-0a955e8a2777\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227390 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9f843c7a-a6a4-4d2d-976f-0a955e8a2777-srv-cert\") pod \"catalog-operator-68c6474976-t6nrd\" (UID: \"9f843c7a-a6a4-4d2d-976f-0a955e8a2777\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227408 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-auth-proxy-config\") pod \"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227422 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227466 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a5f82de9-292f-47ab-9590-00f63a73a25d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227507 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-oauth-serving-cert\") pod 
\"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227543 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9ab7be4d-a4d6-4857-ba26-2ade2b6b3917-signing-key\") pod \"service-ca-9c57cc56f-x47v9\" (UID: \"9ab7be4d-a4d6-4857-ba26-2ade2b6b3917\") " pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227579 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227614 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqgp5\" (UniqueName: \"kubernetes.io/projected/a5f82de9-292f-47ab-9590-00f63a73a25d-kube-api-access-lqgp5\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227640 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bv66\" (UniqueName: \"kubernetes.io/projected/508261e1-05b6-486d-9724-768d8729d7dd-kube-api-access-8bv66\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227669 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8d27792d-685d-4a24-96f6-e38710e1cd6c-webhook-cert\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227693 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs2xl\" (UniqueName: \"kubernetes.io/projected/e099f402-eb6b-4a1c-b5f6-f713fff68945-kube-api-access-zs2xl\") pod \"multus-admission-controller-857f4d67dd-88l2c\" (UID: \"e099f402-eb6b-4a1c-b5f6-f713fff68945\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227700 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1886cfcf-db6d-49b2-8f0a-4637996373db-config-volume\") pod \"collect-profiles-29458785-q4d9v\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227718 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7865114-eb86-4994-b91d-3c39f9cee6a5-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6smzv\" (UID: \"a7865114-eb86-4994-b91d-3c39f9cee6a5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" Jan 04 11:50:21 crc 
kubenswrapper[5003]: I0104 11:50:21.227745 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7865114-eb86-4994-b91d-3c39f9cee6a5-config\") pod \"kube-controller-manager-operator-78b949d7b-6smzv\" (UID: \"a7865114-eb86-4994-b91d-3c39f9cee6a5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227774 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/831746c3-7123-4189-a551-7f7852402807-trusted-ca\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227799 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2g644\" (UID: \"dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227823 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-serving-cert\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227850 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7dccbf4-f7cc-4306-b244-605f2849a805-config\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227891 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8d27792d-685d-4a24-96f6-e38710e1cd6c-apiservice-cert\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227916 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227952 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/831746c3-7123-4189-a551-7f7852402807-config\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.227981 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/b5684173-3dc4-40bb-8424-096e087c1afd-proxy-tls\") pod \"machine-config-controller-84d6567774-5w6zp\" (UID: \"b5684173-3dc4-40bb-8424-096e087c1afd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228007 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thswx\" (UniqueName: \"kubernetes.io/projected/56927c4a-a76e-4c0e-b2d4-2ff8cf31d8eb-kube-api-access-thswx\") pod \"migrator-59844c95c7-b5flr\" (UID: \"56927c4a-a76e-4c0e-b2d4-2ff8cf31d8eb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228055 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8d27792d-685d-4a24-96f6-e38710e1cd6c-tmpfs\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228083 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhlcj\" (UniqueName: \"kubernetes.io/projected/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-kube-api-access-fhlcj\") pod \"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228110 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45x9h\" (UniqueName: \"kubernetes.io/projected/1886cfcf-db6d-49b2-8f0a-4637996373db-kube-api-access-45x9h\") pod \"collect-profiles-29458785-q4d9v\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228131 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e5b9cf7-2752-4ac4-804a-7b88df12b435-config\") pod \"service-ca-operator-777779d784-4m2tr\" (UID: \"8e5b9cf7-2752-4ac4-804a-7b88df12b435\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228134 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjlg4\" (UniqueName: \"kubernetes.io/projected/c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a-kube-api-access-sjlg4\") pod \"control-plane-machine-set-operator-78cbb6b69f-zgmzz\" (UID: \"c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228192 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-console-config\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228221 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: 
\"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228225 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a5f82de9-292f-47ab-9590-00f63a73a25d-trusted-ca\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228249 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62eb9818-98b0-40f2-9629-78923c5b112f-serving-cert\") pod \"openshift-config-operator-7777fb866f-cd6dv\" (UID: \"62eb9818-98b0-40f2-9629-78923c5b112f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228304 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/62eb9818-98b0-40f2-9629-78923c5b112f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cd6dv\" (UID: \"62eb9818-98b0-40f2-9629-78923c5b112f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228334 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2d6j\" (UniqueName: \"kubernetes.io/projected/cec0c8d3-e486-4b4c-8f49-9a04926b7f05-kube-api-access-z2d6j\") pod \"package-server-manager-789f6589d5-qgwds\" (UID: \"cec0c8d3-e486-4b4c-8f49-9a04926b7f05\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228359 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b46t4\" (UniqueName: \"kubernetes.io/projected/8e5b9cf7-2752-4ac4-804a-7b88df12b435-kube-api-access-b46t4\") pod \"service-ca-operator-777779d784-4m2tr\" (UID: \"8e5b9cf7-2752-4ac4-804a-7b88df12b435\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228383 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-psjfm\" (UID: \"e3d1074f-cb7e-4fa5-9477-e6c1835b6488\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228405 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69drk\" (UniqueName: \"kubernetes.io/projected/dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce-kube-api-access-69drk\") pod \"cluster-samples-operator-665b6dd947-2g644\" (UID: \"dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228427 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnv27\" (UniqueName: \"kubernetes.io/projected/9ab7be4d-a4d6-4857-ba26-2ade2b6b3917-kube-api-access-dnv27\") pod \"service-ca-9c57cc56f-x47v9\" (UID: \"9ab7be4d-a4d6-4857-ba26-2ade2b6b3917\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228448 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/27951f81-2e39-4d40-a0f9-cd6f66265a41-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bsmnf\" (UID: \"27951f81-2e39-4d40-a0f9-cd6f66265a41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228468 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5681e3b7-2931-4ebe-a12c-872ad9ab9906-srv-cert\") pod \"olm-operator-6b444d44fb-nxp6d\" (UID: \"5681e3b7-2931-4ebe-a12c-872ad9ab9906\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228487 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pks9v\" (UniqueName: \"kubernetes.io/projected/62eb9818-98b0-40f2-9629-78923c5b112f-kube-api-access-pks9v\") pod \"openshift-config-operator-7777fb866f-cd6dv\" (UID: \"62eb9818-98b0-40f2-9629-78923c5b112f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228506 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a5f82de9-292f-47ab-9590-00f63a73a25d-metrics-tls\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228538 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/87bcf788-c637-4ed6-a5da-650979a40203-default-certificate\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228559 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-audit-policies\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228629 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/759c4131-a27e-4a35-b609-6a431eff05a6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4k548\" (UID: \"759c4131-a27e-4a35-b609-6a431eff05a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228649 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-images\") pod \"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228732 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-87sxh\" (UniqueName: \"kubernetes.io/projected/759c4131-a27e-4a35-b609-6a431eff05a6-kube-api-access-87sxh\") pod \"openshift-controller-manager-operator-756b6f6bc6-4k548\" (UID: \"759c4131-a27e-4a35-b609-6a431eff05a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228790 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7865114-eb86-4994-b91d-3c39f9cee6a5-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6smzv\" (UID: \"a7865114-eb86-4994-b91d-3c39f9cee6a5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228816 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/cec0c8d3-e486-4b4c-8f49-9a04926b7f05-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qgwds\" (UID: \"cec0c8d3-e486-4b4c-8f49-9a04926b7f05\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228838 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d7dccbf4-f7cc-4306-b244-605f2849a805-etcd-service-ca\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228880 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27951f81-2e39-4d40-a0f9-cd6f66265a41-config\") pod \"kube-apiserver-operator-766d6c64bb-bsmnf\" (UID: \"27951f81-2e39-4d40-a0f9-cd6f66265a41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228904 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-oauth-config\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228952 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crf55\" (UniqueName: \"kubernetes.io/projected/9f843c7a-a6a4-4d2d-976f-0a955e8a2777-kube-api-access-crf55\") pod \"catalog-operator-68c6474976-t6nrd\" (UID: \"9f843c7a-a6a4-4d2d-976f-0a955e8a2777\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228975 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.228997 5003 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5681e3b7-2931-4ebe-a12c-872ad9ab9906-profile-collector-cert\") pod \"olm-operator-6b444d44fb-nxp6d\" (UID: \"5681e3b7-2931-4ebe-a12c-872ad9ab9906\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.229053 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e5b9cf7-2752-4ac4-804a-7b88df12b435-serving-cert\") pod \"service-ca-operator-777779d784-4m2tr\" (UID: \"8e5b9cf7-2752-4ac4-804a-7b88df12b435\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.229077 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.229119 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/87bcf788-c637-4ed6-a5da-650979a40203-stats-auth\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.229144 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b5684173-3dc4-40bb-8424-096e087c1afd-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5w6zp\" (UID: \"b5684173-3dc4-40bb-8424-096e087c1afd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.229167 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d7dccbf4-f7cc-4306-b244-605f2849a805-etcd-ca\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.229213 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1886cfcf-db6d-49b2-8f0a-4637996373db-secret-volume\") pod \"collect-profiles-29458785-q4d9v\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.229595 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/759c4131-a27e-4a35-b609-6a431eff05a6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-4k548\" (UID: \"759c4131-a27e-4a35-b609-6a431eff05a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.229743 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-images\") pod 
\"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.230128 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/62eb9818-98b0-40f2-9629-78923c5b112f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cd6dv\" (UID: \"62eb9818-98b0-40f2-9629-78923c5b112f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.231631 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/27951f81-2e39-4d40-a0f9-cd6f66265a41-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bsmnf\" (UID: \"27951f81-2e39-4d40-a0f9-cd6f66265a41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.231951 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/831746c3-7123-4189-a551-7f7852402807-config\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.231992 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/664dcbda-0324-4957-84e5-309cbd624afc-metrics-tls\") pod \"dns-operator-744455d44c-8qsxn\" (UID: \"664dcbda-0324-4957-84e5-309cbd624afc\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.232385 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/831746c3-7123-4189-a551-7f7852402807-serving-cert\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.232764 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/8d27792d-685d-4a24-96f6-e38710e1cd6c-tmpfs\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.232781 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/87bcf788-c637-4ed6-a5da-650979a40203-default-certificate\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.232837 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/508261e1-05b6-486d-9724-768d8729d7dd-audit-dir\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.232984 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/87bcf788-c637-4ed6-a5da-650979a40203-service-ca-bundle\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.233809 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7865114-eb86-4994-b91d-3c39f9cee6a5-config\") pod \"kube-controller-manager-operator-78b949d7b-6smzv\" (UID: \"a7865114-eb86-4994-b91d-3c39f9cee6a5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.233939 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/759c4131-a27e-4a35-b609-6a431eff05a6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-4k548\" (UID: \"759c4131-a27e-4a35-b609-6a431eff05a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.234938 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/831746c3-7123-4189-a551-7f7852402807-trusted-ca\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.235450 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b5684173-3dc4-40bb-8424-096e087c1afd-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5w6zp\" (UID: \"b5684173-3dc4-40bb-8424-096e087c1afd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.235730 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1886cfcf-db6d-49b2-8f0a-4637996373db-secret-volume\") pod \"collect-profiles-29458785-q4d9v\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.236121 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a7865114-eb86-4994-b91d-3c39f9cee6a5-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-6smzv\" (UID: \"a7865114-eb86-4994-b91d-3c39f9cee6a5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.236175 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a5f82de9-292f-47ab-9590-00f63a73a25d-metrics-tls\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.236315 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/9f843c7a-a6a4-4d2d-976f-0a955e8a2777-profile-collector-cert\") pod \"catalog-operator-68c6474976-t6nrd\" 
(UID: \"9f843c7a-a6a4-4d2d-976f-0a955e8a2777\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.236880 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-proxy-tls\") pod \"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.237405 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/87bcf788-c637-4ed6-a5da-650979a40203-metrics-certs\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.237498 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.237873 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62eb9818-98b0-40f2-9629-78923c5b112f-serving-cert\") pod \"openshift-config-operator-7777fb866f-cd6dv\" (UID: \"62eb9818-98b0-40f2-9629-78923c5b112f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.238091 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5681e3b7-2931-4ebe-a12c-872ad9ab9906-profile-collector-cert\") pod \"olm-operator-6b444d44fb-nxp6d\" (UID: \"5681e3b7-2931-4ebe-a12c-872ad9ab9906\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.238371 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5681e3b7-2931-4ebe-a12c-872ad9ab9906-srv-cert\") pod \"olm-operator-6b444d44fb-nxp6d\" (UID: \"5681e3b7-2931-4ebe-a12c-872ad9ab9906\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.238561 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-2g644\" (UID: \"dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.238817 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e5b9cf7-2752-4ac4-804a-7b88df12b435-serving-cert\") pod \"service-ca-operator-777779d784-4m2tr\" (UID: \"8e5b9cf7-2752-4ac4-804a-7b88df12b435\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.239548 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/87bcf788-c637-4ed6-a5da-650979a40203-stats-auth\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " 
pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.244290 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27951f81-2e39-4d40-a0f9-cd6f66265a41-config\") pod \"kube-apiserver-operator-766d6c64bb-bsmnf\" (UID: \"27951f81-2e39-4d40-a0f9-cd6f66265a41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.260999 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.277887 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.290077 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d7dccbf4-f7cc-4306-b244-605f2849a805-etcd-client\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.297267 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.305359 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7dccbf4-f7cc-4306-b244-605f2849a805-config\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.318060 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.325200 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/d7dccbf4-f7cc-4306-b244-605f2849a805-etcd-ca\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.338697 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.344288 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/d7dccbf4-f7cc-4306-b244-605f2849a805-etcd-service-ca\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.358485 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.378574 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.397539 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.411417 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d7dccbf4-f7cc-4306-b244-605f2849a805-serving-cert\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.418160 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.431619 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zgmzz\" (UID: \"c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.438543 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.477993 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.488507 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/9f843c7a-a6a4-4d2d-976f-0a955e8a2777-srv-cert\") pod \"catalog-operator-68c6474976-t6nrd\" (UID: \"9f843c7a-a6a4-4d2d-976f-0a955e8a2777\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.498740 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.512641 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e099f402-eb6b-4a1c-b5f6-f713fff68945-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-88l2c\" (UID: \"e099f402-eb6b-4a1c-b5f6-f713fff68945\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.517484 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.538701 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.558450 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.567496 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-oauth-config\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.579067 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 
11:50:21.600179 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.608198 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-serving-cert\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.618074 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.626247 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-console-config\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.637486 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.648630 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-service-ca\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.667715 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.677860 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.679147 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-trusted-ca-bundle\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.683785 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-oauth-serving-cert\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.698956 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.718771 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.728927 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/cec0c8d3-e486-4b4c-8f49-9a04926b7f05-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-qgwds\" (UID: \"cec0c8d3-e486-4b4c-8f49-9a04926b7f05\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.738635 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.759182 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.779200 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.799377 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.806737 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.806828 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.806920 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.819438 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.827594 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b5684173-3dc4-40bb-8424-096e087c1afd-proxy-tls\") pod \"machine-config-controller-84d6567774-5w6zp\" (UID: \"b5684173-3dc4-40bb-8424-096e087c1afd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.838694 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.848095 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8d27792d-685d-4a24-96f6-e38710e1cd6c-webhook-cert\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.850265 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8d27792d-685d-4a24-96f6-e38710e1cd6c-apiservice-cert\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.860373 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.879428 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.887924 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/9ab7be4d-a4d6-4857-ba26-2ade2b6b3917-signing-key\") pod \"service-ca-9c57cc56f-x47v9\" (UID: \"9ab7be4d-a4d6-4857-ba26-2ade2b6b3917\") " pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.899944 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.911339 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/9ab7be4d-a4d6-4857-ba26-2ade2b6b3917-signing-cabundle\") pod \"service-ca-9c57cc56f-x47v9\" (UID: \"9ab7be4d-a4d6-4857-ba26-2ade2b6b3917\") " pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.918807 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.938429 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.959251 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.961049 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.978690 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.988933 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:21 crc kubenswrapper[5003]: I0104 11:50:21.998402 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.007581 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.024787 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.029349 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.038596 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.048941 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.056512 5003 request.go:700] Waited for 1.016965829s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/secrets?fieldSelector=metadata.name%3Dv4-0-config-system-serving-cert&limit=500&resourceVersion=0 Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.059572 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.073008 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.079101 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.093162 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.108665 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.118625 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.118963 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.138651 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 04 
11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.149251 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.178323 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.180686 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.193960 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.200462 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.201055 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.217344 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 04 11:50:22 crc kubenswrapper[5003]: E0104 11:50:22.231422 5003 secret.go:188] Couldn't get secret openshift-kube-scheduler-operator/kube-scheduler-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 04 11:50:22 crc kubenswrapper[5003]: E0104 11:50:22.231563 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-serving-cert podName:e3d1074f-cb7e-4fa5-9477-e6c1835b6488 nodeName:}" failed. No retries permitted until 2026-01-04 11:50:22.731535507 +0000 UTC m=+138.204565348 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-serving-cert") pod "openshift-kube-scheduler-operator-5fdd9b5758-psjfm" (UID: "e3d1074f-cb7e-4fa5-9477-e6c1835b6488") : failed to sync secret cache: timed out waiting for the condition Jan 04 11:50:22 crc kubenswrapper[5003]: E0104 11:50:22.233600 5003 configmap.go:193] Couldn't get configMap openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-config: failed to sync configmap cache: timed out waiting for the condition Jan 04 11:50:22 crc kubenswrapper[5003]: E0104 11:50:22.233771 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-config podName:e3d1074f-cb7e-4fa5-9477-e6c1835b6488 nodeName:}" failed. 
No retries permitted until 2026-01-04 11:50:22.733745987 +0000 UTC m=+138.206775828 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-config") pod "openshift-kube-scheduler-operator-5fdd9b5758-psjfm" (UID: "e3d1074f-cb7e-4fa5-9477-e6c1835b6488") : failed to sync configmap cache: timed out waiting for the condition Jan 04 11:50:22 crc kubenswrapper[5003]: E0104 11:50:22.233627 5003 configmap.go:193] Couldn't get configMap openshift-authentication/audit: failed to sync configmap cache: timed out waiting for the condition Jan 04 11:50:22 crc kubenswrapper[5003]: E0104 11:50:22.233925 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-audit-policies podName:508261e1-05b6-486d-9724-768d8729d7dd nodeName:}" failed. No retries permitted until 2026-01-04 11:50:22.733915051 +0000 UTC m=+138.206944892 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "audit-policies" (UniqueName: "kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-audit-policies") pod "oauth-openshift-558db77b4-pd2px" (UID: "508261e1-05b6-486d-9724-768d8729d7dd") : failed to sync configmap cache: timed out waiting for the condition Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.238219 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.257955 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.278245 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.297607 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.318160 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.337273 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.357961 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.377938 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.398479 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.430650 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.438667 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.458931 5003 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.478736 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.508814 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.517869 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.538788 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.558927 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.577368 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.597459 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.619196 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.637941 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.658086 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.715438 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp2h9\" (UniqueName: \"kubernetes.io/projected/f69f13d6-7550-471c-b84e-e62b06c17c9b-kube-api-access-rp2h9\") pod \"apiserver-7bbb656c7d-jlfc5\" (UID: \"f69f13d6-7550-471c-b84e-e62b06c17c9b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.733969 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfjlz\" (UniqueName: \"kubernetes.io/projected/19e87eb5-dc5b-4012-b862-22c8083c247b-kube-api-access-dfjlz\") pod \"apiserver-76f77b778f-tlh5b\" (UID: \"19e87eb5-dc5b-4012-b862-22c8083c247b\") " pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.753896 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5xtf\" (UniqueName: \"kubernetes.io/projected/f2f9ef4d-14c5-47d9-9e7b-234cde2de773-kube-api-access-r5xtf\") pod \"machine-approver-56656f9798-fpmfh\" (UID: \"f2f9ef4d-14c5-47d9-9e7b-234cde2de773\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.756719 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-config\") pod 
\"openshift-kube-scheduler-operator-5fdd9b5758-psjfm\" (UID: \"e3d1074f-cb7e-4fa5-9477-e6c1835b6488\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.756887 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-psjfm\" (UID: \"e3d1074f-cb7e-4fa5-9477-e6c1835b6488\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.756950 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-audit-policies\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.758705 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-audit-policies\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.759413 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-psjfm\" (UID: \"e3d1074f-cb7e-4fa5-9477-e6c1835b6488\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.764602 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-psjfm\" (UID: \"e3d1074f-cb7e-4fa5-9477-e6c1835b6488\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.773482 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.785814 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghpkx\" (UniqueName: \"kubernetes.io/projected/ef5fdbe3-490c-476b-af83-a810d6e5a888-kube-api-access-ghpkx\") pod \"machine-api-operator-5694c8668f-dslqq\" (UID: \"ef5fdbe3-490c-476b-af83-a810d6e5a888\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.786665 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55q26\" (UniqueName: \"kubernetes.io/projected/8006abe7-31d8-489d-9005-d96d40bb9ba5-kube-api-access-55q26\") pod \"controller-manager-879f6c89f-tk9gz\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.801313 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.814182 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2w5q\" (UniqueName: \"kubernetes.io/projected/40dada5c-5a67-4362-b9fb-e49a7fc32307-kube-api-access-g2w5q\") pod \"route-controller-manager-6576b87f9c-nxmj9\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.825449 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mvwh\" (UniqueName: \"kubernetes.io/projected/7044cdc7-b7c2-454e-9460-8f6b783f85eb-kube-api-access-2mvwh\") pod \"authentication-operator-69f744f599-n7vrk\" (UID: \"7044cdc7-b7c2-454e-9460-8f6b783f85eb\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.827793 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.836883 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwqb7\" (UniqueName: \"kubernetes.io/projected/b9f2bf47-0b56-44cd-ba31-3e9a5320186d-kube-api-access-wwqb7\") pod \"openshift-apiserver-operator-796bbdcf4f-r6s9j\" (UID: \"b9f2bf47-0b56-44cd-ba31-3e9a5320186d\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.837309 5003 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.844749 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.857754 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.857810 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.867999 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.880278 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.917981 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.940171 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.958410 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.977708 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 04 11:50:22 crc kubenswrapper[5003]: I0104 11:50:22.997498 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.021486 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.022492 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.029258 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tk9gz"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.039827 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 04 11:50:23 crc kubenswrapper[5003]: W0104 11:50:23.044207 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8006abe7_31d8_489d_9005_d96d40bb9ba5.slice/crio-7f8437beccc07bc5d223da57b86f240f2a510a4dbe614c1026e139c9bbc085e0 WatchSource:0}: Error finding container 7f8437beccc07bc5d223da57b86f240f2a510a4dbe614c1026e139c9bbc085e0: Status 404 returned error can't find the container with id 7f8437beccc07bc5d223da57b86f240f2a510a4dbe614c1026e139c9bbc085e0 Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.056657 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.059027 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.075897 5003 request.go:700] Waited for 1.894223341s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-server-dockercfg-qx5rd&limit=500&resourceVersion=0 Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.078277 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.090886 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tlh5b"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.099433 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 04 11:50:23 crc kubenswrapper[5003]: W0104 11:50:23.109195 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19e87eb5_dc5b_4012_b862_22c8083c247b.slice/crio-1a3497a2d75a07865dde9c8889b6aeb0b8de63ad8f97ad5b76a6d792e8a53f02 WatchSource:0}: Error finding container 1a3497a2d75a07865dde9c8889b6aeb0b8de63ad8f97ad5b76a6d792e8a53f02: Status 404 returned error can't find the container with id 1a3497a2d75a07865dde9c8889b6aeb0b8de63ad8f97ad5b76a6d792e8a53f02 Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.113160 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.137919 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hq88c\" (UniqueName: \"kubernetes.io/projected/b43f39ef-60cd-44be-8061-715fbf71a36b-kube-api-access-hq88c\") pod \"console-f9d7485db-2jt99\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.157076 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e3d1074f-cb7e-4fa5-9477-e6c1835b6488-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-psjfm\" (UID: \"e3d1074f-cb7e-4fa5-9477-e6c1835b6488\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.175412 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-n7vrk"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.176655 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj5cl\" (UniqueName: \"kubernetes.io/projected/d7dccbf4-f7cc-4306-b244-605f2849a805-kube-api-access-xj5cl\") pod \"etcd-operator-b45778765-86rlm\" (UID: \"d7dccbf4-f7cc-4306-b244-605f2849a805\") " pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.203159 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-x6wrk\" (UniqueName: \"kubernetes.io/projected/831746c3-7123-4189-a551-7f7852402807-kube-api-access-x6wrk\") pod \"console-operator-58897d9998-cw9t7\" (UID: \"831746c3-7123-4189-a551-7f7852402807\") " pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.268666 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.269835 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skx67\" (UniqueName: \"kubernetes.io/projected/87bcf788-c637-4ed6-a5da-650979a40203-kube-api-access-skx67\") pod \"router-default-5444994796-dkgwj\" (UID: \"87bcf788-c637-4ed6-a5da-650979a40203\") " pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.270653 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs2xl\" (UniqueName: \"kubernetes.io/projected/e099f402-eb6b-4a1c-b5f6-f713fff68945-kube-api-access-zs2xl\") pod \"multus-admission-controller-857f4d67dd-88l2c\" (UID: \"e099f402-eb6b-4a1c-b5f6-f713fff68945\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.280562 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tq9v\" (UniqueName: \"kubernetes.io/projected/8d27792d-685d-4a24-96f6-e38710e1cd6c-kube-api-access-2tq9v\") pod \"packageserver-d55dfcdfc-jkdzt\" (UID: \"8d27792d-685d-4a24-96f6-e38710e1cd6c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.280686 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6vq7\" (UniqueName: \"kubernetes.io/projected/5681e3b7-2931-4ebe-a12c-872ad9ab9906-kube-api-access-d6vq7\") pod \"olm-operator-6b444d44fb-nxp6d\" (UID: \"5681e3b7-2931-4ebe-a12c-872ad9ab9906\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.283637 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.292326 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqf8n\" (UniqueName: \"kubernetes.io/projected/664dcbda-0324-4957-84e5-309cbd624afc-kube-api-access-bqf8n\") pod \"dns-operator-744455d44c-8qsxn\" (UID: \"664dcbda-0324-4957-84e5-309cbd624afc\") " pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.294206 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.313690 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.313938 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/27951f81-2e39-4d40-a0f9-cd6f66265a41-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bsmnf\" (UID: \"27951f81-2e39-4d40-a0f9-cd6f66265a41\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.322054 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.322253 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.332969 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-496f7\" (UniqueName: \"kubernetes.io/projected/b5684173-3dc4-40bb-8424-096e087c1afd-kube-api-access-496f7\") pod \"machine-config-controller-84d6567774-5w6zp\" (UID: \"b5684173-3dc4-40bb-8424-096e087c1afd\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.353810 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-dslqq"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.355429 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2d6j\" (UniqueName: \"kubernetes.io/projected/cec0c8d3-e486-4b4c-8f49-9a04926b7f05-kube-api-access-z2d6j\") pod \"package-server-manager-789f6589d5-qgwds\" (UID: \"cec0c8d3-e486-4b4c-8f49-9a04926b7f05\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.370975 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b46t4\" (UniqueName: \"kubernetes.io/projected/8e5b9cf7-2752-4ac4-804a-7b88df12b435-kube-api-access-b46t4\") pod \"service-ca-operator-777779d784-4m2tr\" (UID: \"8e5b9cf7-2752-4ac4-804a-7b88df12b435\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.374631 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.383071 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.391089 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69drk\" (UniqueName: \"kubernetes.io/projected/dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce-kube-api-access-69drk\") pod \"cluster-samples-operator-665b6dd947-2g644\" (UID: \"dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.404221 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.409621 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnv27\" (UniqueName: \"kubernetes.io/projected/9ab7be4d-a4d6-4857-ba26-2ade2b6b3917-kube-api-access-dnv27\") pod \"service-ca-9c57cc56f-x47v9\" (UID: \"9ab7be4d-a4d6-4857-ba26-2ade2b6b3917\") " pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.439821 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jjx2\" (UniqueName: \"kubernetes.io/projected/92ada4c8-acb9-4740-85dc-815cd8a3b028-kube-api-access-5jjx2\") pod \"downloads-7954f5f757-pgx7w\" (UID: \"92ada4c8-acb9-4740-85dc-815cd8a3b028\") " pod="openshift-console/downloads-7954f5f757-pgx7w" Jan 04 11:50:23 crc kubenswrapper[5003]: W0104 11:50:23.440296 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7044cdc7_b7c2_454e_9460_8f6b783f85eb.slice/crio-29b919e827589bf71b73a5d3ad82324bc6b4425911a511318e53c0bf540c1c8c WatchSource:0}: Error finding container 29b919e827589bf71b73a5d3ad82324bc6b4425911a511318e53c0bf540c1c8c: Status 404 returned error can't find the container with id 29b919e827589bf71b73a5d3ad82324bc6b4425911a511318e53c0bf540c1c8c Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.467403 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhlcj\" (UniqueName: \"kubernetes.io/projected/ca95e54c-a1d3-40a8-9bb4-fa314e9fe161-kube-api-access-fhlcj\") pod \"machine-config-operator-74547568cd-stxr8\" (UID: \"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.483371 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thswx\" (UniqueName: \"kubernetes.io/projected/56927c4a-a76e-4c0e-b2d4-2ff8cf31d8eb-kube-api-access-thswx\") pod \"migrator-59844c95c7-b5flr\" (UID: \"56927c4a-a76e-4c0e-b2d4-2ff8cf31d8eb\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.485263 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.493598 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.496733 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a5f82de9-292f-47ab-9590-00f63a73a25d-bound-sa-token\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.500557 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.508412 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.515726 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.517129 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqgp5\" (UniqueName: \"kubernetes.io/projected/a5f82de9-292f-47ab-9590-00f63a73a25d-kube-api-access-lqgp5\") pod \"ingress-operator-5b745b69d9-dphl6\" (UID: \"a5f82de9-292f-47ab-9590-00f63a73a25d\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.537240 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bv66\" (UniqueName: \"kubernetes.io/projected/508261e1-05b6-486d-9724-768d8729d7dd-kube-api-access-8bv66\") pod \"oauth-openshift-558db77b4-pd2px\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") " pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.554298 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjlg4\" (UniqueName: \"kubernetes.io/projected/c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a-kube-api-access-sjlg4\") pod \"control-plane-machine-set-operator-78cbb6b69f-zgmzz\" (UID: \"c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.558486 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.575578 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45x9h\" (UniqueName: \"kubernetes.io/projected/1886cfcf-db6d-49b2-8f0a-4637996373db-kube-api-access-45x9h\") pod \"collect-profiles-29458785-q4d9v\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.576466 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.589918 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.596818 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crf55\" (UniqueName: \"kubernetes.io/projected/9f843c7a-a6a4-4d2d-976f-0a955e8a2777-kube-api-access-crf55\") pod \"catalog-operator-68c6474976-t6nrd\" (UID: \"9f843c7a-a6a4-4d2d-976f-0a955e8a2777\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.601894 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.611742 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.616702 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87sxh\" (UniqueName: \"kubernetes.io/projected/759c4131-a27e-4a35-b609-6a431eff05a6-kube-api-access-87sxh\") pod \"openshift-controller-manager-operator-756b6f6bc6-4k548\" (UID: \"759c4131-a27e-4a35-b609-6a431eff05a6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.626301 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.640827 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a7865114-eb86-4994-b91d-3c39f9cee6a5-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-6smzv\" (UID: \"a7865114-eb86-4994-b91d-3c39f9cee6a5\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.661301 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.668765 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.676891 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pks9v\" (UniqueName: \"kubernetes.io/projected/62eb9818-98b0-40f2-9629-78923c5b112f-kube-api-access-pks9v\") pod \"openshift-config-operator-7777fb866f-cd6dv\" (UID: \"62eb9818-98b0-40f2-9629-78923c5b112f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.689375 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.690608 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.696719 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" event={"ID":"b9f2bf47-0b56-44cd-ba31-3e9a5320186d","Type":"ContainerStarted","Data":"9ddc909ebfa26361df7c00a2391c9610c8aa2800e6e3897641b6f88f8ac9db67"} Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.697050 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-pgx7w" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.702186 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.712367 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" event={"ID":"40dada5c-5a67-4362-b9fb-e49a7fc32307","Type":"ContainerStarted","Data":"ea477a89e313ea3aac2911777d2d07de24287c464bc423950818313935825ea7"} Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.713025 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.715393 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" event={"ID":"ef5fdbe3-490c-476b-af83-a810d6e5a888","Type":"ContainerStarted","Data":"721d1c6d06b94e55ebefbed36bbd3b92f2108c4f7410ebefd8f6a5719ac450cb"} Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.720308 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.732096 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" event={"ID":"f2f9ef4d-14c5-47d9-9e7b-234cde2de773","Type":"ContainerStarted","Data":"d7295de4cff0a51e3c411f33b72ee7d01c02831ab3a188eac4473b0605605820"} Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.732148 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" event={"ID":"f2f9ef4d-14c5-47d9-9e7b-234cde2de773","Type":"ContainerStarted","Data":"a0f8ac88838feb0ae26f6c20b12f729ae107d562f7caada46266827438437c87"} Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.745168 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.760285 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-88l2c"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.763674 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" event={"ID":"7044cdc7-b7c2-454e-9460-8f6b783f85eb","Type":"ContainerStarted","Data":"29b919e827589bf71b73a5d3ad82324bc6b4425911a511318e53c0bf540c1c8c"} Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.779303 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.780098 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" event={"ID":"f69f13d6-7550-471c-b84e-e62b06c17c9b","Type":"ContainerStarted","Data":"4461fd637d0fbd40848af87562ec08753abdcf46824207ff5371d2ff2ca9fc41"} Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.780162 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-registry-tls\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.780360 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-bound-sa-token\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.780380 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/edc8a447-cc41-4241-be6a-957fa4255108-ca-trust-extracted\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.780510 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.780529 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-registry-certificates\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.780547 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4tnb\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-kube-api-access-z4tnb\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.780582 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-socket-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: E0104 11:50:23.783591 5003 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:24.283567656 +0000 UTC m=+139.756597487 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.784615 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-registration-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.784659 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-plugins-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.786444 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-mountpoint-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.786499 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-csi-data-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.786549 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-trusted-ca\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.786707 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/edc8a447-cc41-4241-be6a-957fa4255108-installation-pull-secrets\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.786737 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jssth\" (UniqueName: \"kubernetes.io/projected/26a96160-4284-44b2-89de-ea0b0e8b8a21-kube-api-access-jssth\") pod 
\"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.790782 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-2jt99"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.831498 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" event={"ID":"8006abe7-31d8-489d-9005-d96d40bb9ba5","Type":"ContainerStarted","Data":"04fe15c1cd00c4072d285d953ee622494d0c7d2d2f1d7202f33d4e3526178996"} Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.831561 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" event={"ID":"8006abe7-31d8-489d-9005-d96d40bb9ba5","Type":"ContainerStarted","Data":"7f8437beccc07bc5d223da57b86f240f2a510a4dbe614c1026e139c9bbc085e0"} Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.832170 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.832647 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-86rlm"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.840511 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" event={"ID":"19e87eb5-dc5b-4012-b862-22c8083c247b","Type":"ContainerStarted","Data":"1a3497a2d75a07865dde9c8889b6aeb0b8de63ad8f97ad5b76a6d792e8a53f02"} Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.854776 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.855116 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.863978 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.869287 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" Jan 04 11:50:23 crc kubenswrapper[5003]: W0104 11:50:23.878754 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87bcf788_c637_4ed6_a5da_650979a40203.slice/crio-fbec3111b12540613f416bc5319095e91037bf86352102e128499d81720345a8 WatchSource:0}: Error finding container fbec3111b12540613f416bc5319095e91037bf86352102e128499d81720345a8: Status 404 returned error can't find the container with id fbec3111b12540613f416bc5319095e91037bf86352102e128499d81720345a8 Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.890937 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891155 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-m6xcl\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891165 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-cw9t7"] Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891270 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-registry-certificates\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891304 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4tnb\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-kube-api-access-z4tnb\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891339 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-socket-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891381 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5271e5a8-5812-491f-86d3-8cd4504ef32f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891476 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-q977v\" (UID: \"f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891520 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59vrc\" (UniqueName: \"kubernetes.io/projected/f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed-kube-api-access-59vrc\") pod \"kube-storage-version-migrator-operator-b67b599dd-q977v\" (UID: \"f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891552 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-registration-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891585 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-plugins-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891703 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-trusted-ca\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891735 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-mountpoint-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891754 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-csi-data-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891779 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/edc8a447-cc41-4241-be6a-957fa4255108-installation-pull-secrets\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891809 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jssth\" (UniqueName: 
\"kubernetes.io/projected/26a96160-4284-44b2-89de-ea0b0e8b8a21-kube-api-access-jssth\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.891829 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5271e5a8-5812-491f-86d3-8cd4504ef32f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:23 crc kubenswrapper[5003]: E0104 11:50:23.894087 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:24.394066697 +0000 UTC m=+139.867096538 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.896249 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-csi-data-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.896616 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-registration-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.896651 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-plugins-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.897146 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5271e5a8-5812-491f-86d3-8cd4504ef32f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.897220 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-q977v\" (UID: \"f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.897788 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-registry-tls\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.897860 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhn9f\" (UniqueName: \"kubernetes.io/projected/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-kube-api-access-qhn9f\") pod \"marketplace-operator-79b997595-m6xcl\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.897978 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-registry-certificates\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.897993 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-mountpoint-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.899961 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/26a96160-4284-44b2-89de-ea0b0e8b8a21-socket-dir\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.900664 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-bound-sa-token\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.900931 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/edc8a447-cc41-4241-be6a-957fa4255108-ca-trust-extracted\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.900979 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pqq9\" (UniqueName: \"kubernetes.io/projected/5271e5a8-5812-491f-86d3-8cd4504ef32f-kube-api-access-6pqq9\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.901247 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-m6xcl\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.901461 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/edc8a447-cc41-4241-be6a-957fa4255108-ca-trust-extracted\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.901786 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-trusted-ca\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.906494 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-registry-tls\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.908039 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/edc8a447-cc41-4241-be6a-957fa4255108-installation-pull-secrets\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.944451 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4tnb\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-kube-api-access-z4tnb\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.956990 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jssth\" (UniqueName: \"kubernetes.io/projected/26a96160-4284-44b2-89de-ea0b0e8b8a21-kube-api-access-jssth\") pod \"csi-hostpathplugin-bjp7f\" (UID: \"26a96160-4284-44b2-89de-ea0b0e8b8a21\") " pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:23 crc kubenswrapper[5003]: I0104 11:50:23.993261 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-bound-sa-token\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004498 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50f1b88a-1e58-4f1c-b720-d0ac61cb923c-cert\") pod \"ingress-canary-r7kd8\" (UID: \"50f1b88a-1e58-4f1c-b720-d0ac61cb923c\") " 
pod="openshift-ingress-canary/ingress-canary-r7kd8" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004614 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pqq9\" (UniqueName: \"kubernetes.io/projected/5271e5a8-5812-491f-86d3-8cd4504ef32f-kube-api-access-6pqq9\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004640 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-m6xcl\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004681 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-m6xcl\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004703 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1571ddc2-e129-494e-a797-2a5ac2e94f94-metrics-tls\") pod \"dns-default-87wfq\" (UID: \"1571ddc2-e129-494e-a797-2a5ac2e94f94\") " pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004752 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004785 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5271e5a8-5812-491f-86d3-8cd4504ef32f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004824 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-q977v\" (UID: \"f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004844 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfqw6\" (UniqueName: \"kubernetes.io/projected/1571ddc2-e129-494e-a797-2a5ac2e94f94-kube-api-access-bfqw6\") pod \"dns-default-87wfq\" (UID: \"1571ddc2-e129-494e-a797-2a5ac2e94f94\") " pod="openshift-dns/dns-default-87wfq" Jan 
04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004872 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59vrc\" (UniqueName: \"kubernetes.io/projected/f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed-kube-api-access-59vrc\") pod \"kube-storage-version-migrator-operator-b67b599dd-q977v\" (UID: \"f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004929 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/84121051-e231-4b58-af2f-eb4701f9876d-node-bootstrap-token\") pod \"machine-config-server-vfq5w\" (UID: \"84121051-e231-4b58-af2f-eb4701f9876d\") " pod="openshift-machine-config-operator/machine-config-server-vfq5w" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.004994 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5271e5a8-5812-491f-86d3-8cd4504ef32f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.005045 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h77md\" (UniqueName: \"kubernetes.io/projected/50f1b88a-1e58-4f1c-b720-d0ac61cb923c-kube-api-access-h77md\") pod \"ingress-canary-r7kd8\" (UID: \"50f1b88a-1e58-4f1c-b720-d0ac61cb923c\") " pod="openshift-ingress-canary/ingress-canary-r7kd8" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.005065 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1571ddc2-e129-494e-a797-2a5ac2e94f94-config-volume\") pod \"dns-default-87wfq\" (UID: \"1571ddc2-e129-494e-a797-2a5ac2e94f94\") " pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.005094 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5271e5a8-5812-491f-86d3-8cd4504ef32f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.005111 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-q977v\" (UID: \"f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.005158 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htfdd\" (UniqueName: \"kubernetes.io/projected/84121051-e231-4b58-af2f-eb4701f9876d-kube-api-access-htfdd\") pod \"machine-config-server-vfq5w\" (UID: \"84121051-e231-4b58-af2f-eb4701f9876d\") " pod="openshift-machine-config-operator/machine-config-server-vfq5w" Jan 04 11:50:24 crc 
kubenswrapper[5003]: I0104 11:50:24.005225 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhn9f\" (UniqueName: \"kubernetes.io/projected/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-kube-api-access-qhn9f\") pod \"marketplace-operator-79b997595-m6xcl\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.005263 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/84121051-e231-4b58-af2f-eb4701f9876d-certs\") pod \"machine-config-server-vfq5w\" (UID: \"84121051-e231-4b58-af2f-eb4701f9876d\") " pod="openshift-machine-config-operator/machine-config-server-vfq5w" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.012261 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5271e5a8-5812-491f-86d3-8cd4504ef32f-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.013439 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-q977v\" (UID: \"f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.014285 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-m6xcl\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:24 crc kubenswrapper[5003]: E0104 11:50:24.019523 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:24.51949869 +0000 UTC m=+139.992528531 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.023428 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-q977v\" (UID: \"f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.024994 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5271e5a8-5812-491f-86d3-8cd4504ef32f-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.037030 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-m6xcl\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.040629 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhn9f\" (UniqueName: \"kubernetes.io/projected/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-kube-api-access-qhn9f\") pod \"marketplace-operator-79b997595-m6xcl\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.048305 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.061753 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pqq9\" (UniqueName: \"kubernetes.io/projected/5271e5a8-5812-491f-86d3-8cd4504ef32f-kube-api-access-6pqq9\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.099236 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59vrc\" (UniqueName: \"kubernetes.io/projected/f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed-kube-api-access-59vrc\") pod \"kube-storage-version-migrator-operator-b67b599dd-q977v\" (UID: \"f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.116379 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.116501 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1571ddc2-e129-494e-a797-2a5ac2e94f94-metrics-tls\") pod \"dns-default-87wfq\" (UID: \"1571ddc2-e129-494e-a797-2a5ac2e94f94\") " pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.116570 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfqw6\" (UniqueName: \"kubernetes.io/projected/1571ddc2-e129-494e-a797-2a5ac2e94f94-kube-api-access-bfqw6\") pod \"dns-default-87wfq\" (UID: \"1571ddc2-e129-494e-a797-2a5ac2e94f94\") " pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.116620 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/84121051-e231-4b58-af2f-eb4701f9876d-node-bootstrap-token\") pod \"machine-config-server-vfq5w\" (UID: \"84121051-e231-4b58-af2f-eb4701f9876d\") " pod="openshift-machine-config-operator/machine-config-server-vfq5w" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.116674 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h77md\" (UniqueName: \"kubernetes.io/projected/50f1b88a-1e58-4f1c-b720-d0ac61cb923c-kube-api-access-h77md\") pod \"ingress-canary-r7kd8\" (UID: \"50f1b88a-1e58-4f1c-b720-d0ac61cb923c\") " pod="openshift-ingress-canary/ingress-canary-r7kd8" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.116705 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1571ddc2-e129-494e-a797-2a5ac2e94f94-config-volume\") pod \"dns-default-87wfq\" (UID: \"1571ddc2-e129-494e-a797-2a5ac2e94f94\") " pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.116740 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htfdd\" (UniqueName: 
\"kubernetes.io/projected/84121051-e231-4b58-af2f-eb4701f9876d-kube-api-access-htfdd\") pod \"machine-config-server-vfq5w\" (UID: \"84121051-e231-4b58-af2f-eb4701f9876d\") " pod="openshift-machine-config-operator/machine-config-server-vfq5w" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.116787 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/84121051-e231-4b58-af2f-eb4701f9876d-certs\") pod \"machine-config-server-vfq5w\" (UID: \"84121051-e231-4b58-af2f-eb4701f9876d\") " pod="openshift-machine-config-operator/machine-config-server-vfq5w" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.116839 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50f1b88a-1e58-4f1c-b720-d0ac61cb923c-cert\") pod \"ingress-canary-r7kd8\" (UID: \"50f1b88a-1e58-4f1c-b720-d0ac61cb923c\") " pod="openshift-ingress-canary/ingress-canary-r7kd8" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.118233 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1571ddc2-e129-494e-a797-2a5ac2e94f94-config-volume\") pod \"dns-default-87wfq\" (UID: \"1571ddc2-e129-494e-a797-2a5ac2e94f94\") " pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:24 crc kubenswrapper[5003]: E0104 11:50:24.119551 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:24.619276184 +0000 UTC m=+140.092306025 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.163680 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/84121051-e231-4b58-af2f-eb4701f9876d-certs\") pod \"machine-config-server-vfq5w\" (UID: \"84121051-e231-4b58-af2f-eb4701f9876d\") " pod="openshift-machine-config-operator/machine-config-server-vfq5w" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.175717 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1571ddc2-e129-494e-a797-2a5ac2e94f94-metrics-tls\") pod \"dns-default-87wfq\" (UID: \"1571ddc2-e129-494e-a797-2a5ac2e94f94\") " pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.176094 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/50f1b88a-1e58-4f1c-b720-d0ac61cb923c-cert\") pod \"ingress-canary-r7kd8\" (UID: \"50f1b88a-1e58-4f1c-b720-d0ac61cb923c\") " pod="openshift-ingress-canary/ingress-canary-r7kd8" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.176763 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/84121051-e231-4b58-af2f-eb4701f9876d-node-bootstrap-token\") 
pod \"machine-config-server-vfq5w\" (UID: \"84121051-e231-4b58-af2f-eb4701f9876d\") " pod="openshift-machine-config-operator/machine-config-server-vfq5w" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.195726 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5271e5a8-5812-491f-86d3-8cd4504ef32f-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-db64n\" (UID: \"5271e5a8-5812-491f-86d3-8cd4504ef32f\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.215836 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htfdd\" (UniqueName: \"kubernetes.io/projected/84121051-e231-4b58-af2f-eb4701f9876d-kube-api-access-htfdd\") pod \"machine-config-server-vfq5w\" (UID: \"84121051-e231-4b58-af2f-eb4701f9876d\") " pod="openshift-machine-config-operator/machine-config-server-vfq5w" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.218129 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:24 crc kubenswrapper[5003]: E0104 11:50:24.218561 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:24.718549233 +0000 UTC m=+140.191579064 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.223095 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h77md\" (UniqueName: \"kubernetes.io/projected/50f1b88a-1e58-4f1c-b720-d0ac61cb923c-kube-api-access-h77md\") pod \"ingress-canary-r7kd8\" (UID: \"50f1b88a-1e58-4f1c-b720-d0ac61cb923c\") " pod="openshift-ingress-canary/ingress-canary-r7kd8" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.263483 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfqw6\" (UniqueName: \"kubernetes.io/projected/1571ddc2-e129-494e-a797-2a5ac2e94f94-kube-api-access-bfqw6\") pod \"dns-default-87wfq\" (UID: \"1571ddc2-e129-494e-a797-2a5ac2e94f94\") " pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.271651 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm"] Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.309924 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.317363 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.321306 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt"] Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.321610 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:24 crc kubenswrapper[5003]: E0104 11:50:24.321687 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:24.821667496 +0000 UTC m=+140.294697337 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.330233 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.329755 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" Jan 04 11:50:24 crc kubenswrapper[5003]: E0104 11:50:24.330818 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:24.830796032 +0000 UTC m=+140.303825873 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.357670 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.361127 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-r7kd8" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.368245 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-vfq5w" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.433003 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:24 crc kubenswrapper[5003]: E0104 11:50:24.433558 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:24.933539535 +0000 UTC m=+140.406569376 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.481462 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d"] Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.534684 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:24 crc kubenswrapper[5003]: E0104 11:50:24.535569 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:25.035553498 +0000 UTC m=+140.508583339 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.630342 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.636612 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:24 crc kubenswrapper[5003]: E0104 11:50:24.638986 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:25.138955628 +0000 UTC m=+140.611985469 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.738870 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:24 crc kubenswrapper[5003]: E0104 11:50:24.740173 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:25.24015059 +0000 UTC m=+140.713180651 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.839828 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:24 crc kubenswrapper[5003]: E0104 11:50:24.840264 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:25.340248421 +0000 UTC m=+140.813278262 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.882037 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" event={"ID":"5681e3b7-2931-4ebe-a12c-872ad9ab9906","Type":"ContainerStarted","Data":"fd3285468de07051d5c2c28efccee09b6f00af7e9469eea2abb654745c446f4a"} Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.913540 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" event={"ID":"ef5fdbe3-490c-476b-af83-a810d6e5a888","Type":"ContainerStarted","Data":"3bc08aa7fd387de8e6fe6f055878d70f83c51ae521e440100663c6f4924b777f"} Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.944871 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:24 crc kubenswrapper[5003]: E0104 11:50:24.952548 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:25.452512701 +0000 UTC m=+140.925542542 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:24 crc kubenswrapper[5003]: I0104 11:50:24.972919 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" event={"ID":"f2f9ef4d-14c5-47d9-9e7b-234cde2de773","Type":"ContainerStarted","Data":"8d1531c02057c8a957032269bf5b6ef4eb6e44a0b1cbc334fa845dc5d91617aa"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.054160 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.055949 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:25.555921301 +0000 UTC m=+141.028951142 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.126450 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" event={"ID":"7044cdc7-b7c2-454e-9460-8f6b783f85eb","Type":"ContainerStarted","Data":"13a5be103f32be9dfcb9b59d2fb40a490f750eea7b7ee9981326d974ecac006b"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.155772 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.157793 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:25.65777187 +0000 UTC m=+141.130801711 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.188746 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" event={"ID":"b9f2bf47-0b56-44cd-ba31-3e9a5320186d","Type":"ContainerStarted","Data":"666d866fa144f3391d2b53e2079f89d70c4c1c3505598b2a026a6adce3c78b03"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.224545 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" event={"ID":"40dada5c-5a67-4362-b9fb-e49a7fc32307","Type":"ContainerStarted","Data":"b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.227131 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.258428 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.260093 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:25.760075842 +0000 UTC m=+141.233105683 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.274808 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.283584 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vfq5w" event={"ID":"84121051-e231-4b58-af2f-eb4701f9876d","Type":"ContainerStarted","Data":"401cea17da1b2f8da9f7e1e9e74b16308b5d1e25f888582ea22b2341fe721bac"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.286409 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr"] Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.305147 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2jt99" event={"ID":"b43f39ef-60cd-44be-8061-715fbf71a36b","Type":"ContainerStarted","Data":"cfbc6c1c042653e271a7896a1a8776e0c225c047ce28af14de1dbeb53ef506a5"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.322998 5003 generic.go:334] "Generic (PLEG): container finished" podID="f69f13d6-7550-471c-b84e-e62b06c17c9b" containerID="c923052b6cc7903ea885e03658d2fb1bf483fd80ad0868fb1dea57f57d82bc1a" exitCode=0 Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.323084 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" event={"ID":"f69f13d6-7550-471c-b84e-e62b06c17c9b","Type":"ContainerDied","Data":"c923052b6cc7903ea885e03658d2fb1bf483fd80ad0868fb1dea57f57d82bc1a"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.360548 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.364775 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:25.864732616 +0000 UTC m=+141.337762457 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.376057 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" event={"ID":"b5684173-3dc4-40bb-8424-096e087c1afd","Type":"ContainerStarted","Data":"87375e36318a13dabc632a1fc6c75f7b4bc08f99da1c277f444dfd83bf1e214f"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.398346 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd"] Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.407338 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" event={"ID":"e3d1074f-cb7e-4fa5-9477-e6c1835b6488","Type":"ContainerStarted","Data":"e3ce018ec21d4796746ceaa4e8afd8c9e64efc38c7f9470cab28d0cee638536f"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.409631 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8"] Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.425356 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" event={"ID":"e099f402-eb6b-4a1c-b5f6-f713fff68945","Type":"ContainerStarted","Data":"8c29cbec5e4a25530687b9858bf859895be41b241e7ce40b5b4f382c1f9c4da4"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.438198 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" event={"ID":"8d27792d-685d-4a24-96f6-e38710e1cd6c","Type":"ContainerStarted","Data":"b1bbff9771885d7cae09fd6422e755c6b58a93ab239f84906ce0021c2358e23d"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.439028 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.463579 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.464133 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:25.964094698 +0000 UTC m=+141.437124539 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.464383 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.466962 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:25.966946905 +0000 UTC m=+141.439976736 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.476025 5003 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jkdzt container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.27:5443/healthz\": dial tcp 10.217.0.27:5443: connect: connection refused" start-of-body= Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.476093 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" podUID="8d27792d-685d-4a24-96f6-e38710e1cd6c" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.27:5443/healthz\": dial tcp 10.217.0.27:5443: connect: connection refused" Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.513865 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" event={"ID":"d7dccbf4-f7cc-4306-b244-605f2849a805","Type":"ContainerStarted","Data":"8dda05cafe82567e263717ca55cdd1d97f0f3089fe93ac90a6018c9066581df2"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.565515 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.567063 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-04 11:50:26.067038027 +0000 UTC m=+141.540067868 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.569688 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-dkgwj" event={"ID":"87bcf788-c637-4ed6-a5da-650979a40203","Type":"ContainerStarted","Data":"fbec3111b12540613f416bc5319095e91037bf86352102e128499d81720345a8"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.623288 5003 generic.go:334] "Generic (PLEG): container finished" podID="19e87eb5-dc5b-4012-b862-22c8083c247b" containerID="f45ab2ed59bc3d351ca10d9d800a639c2b469253986de823576cd49f0e9d5d50" exitCode=0 Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.623419 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" event={"ID":"19e87eb5-dc5b-4012-b862-22c8083c247b","Type":"ContainerDied","Data":"f45ab2ed59bc3d351ca10d9d800a639c2b469253986de823576cd49f0e9d5d50"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.641196 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz"] Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.651937 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-cw9t7" event={"ID":"831746c3-7123-4189-a551-7f7852402807","Type":"ContainerStarted","Data":"ac44f19764510ae44ae4b6f17c5242964ed156708e18c6a5ebaa8e6867cecbd6"} Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.651985 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.667292 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.668074 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:26.168059783 +0000 UTC m=+141.641089624 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.709983 5003 patch_prober.go:28] interesting pod/console-operator-58897d9998-cw9t7 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/readyz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.710128 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-cw9t7" podUID="831746c3-7123-4189-a551-7f7852402807" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/readyz\": dial tcp 10.217.0.24:8443: connect: connection refused" Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.764516 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr"] Jan 04 11:50:25 crc kubenswrapper[5003]: W0104 11:50:25.769754 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc99d5981_7c0d_49d6_8b8e_8cb2c8c1027a.slice/crio-e2578cdf0cc5ae80d14a3ee8eb06d515479768a1a9c8f7ac15cad6e5c35b46cb WatchSource:0}: Error finding container e2578cdf0cc5ae80d14a3ee8eb06d515479768a1a9c8f7ac15cad6e5c35b46cb: Status 404 returned error can't find the container with id e2578cdf0cc5ae80d14a3ee8eb06d515479768a1a9c8f7ac15cad6e5c35b46cb Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.771716 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.772423 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:26.272391019 +0000 UTC m=+141.745420870 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.782270 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.786634 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:26.286618332 +0000 UTC m=+141.759648173 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.813307 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644"] Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.854943 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds"] Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.870993 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf"] Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.876533 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-dkgwj" podStartSLOduration=117.876487109 podStartE2EDuration="1m57.876487109s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:25.845651679 +0000 UTC m=+141.318681520" watchObservedRunningTime="2026-01-04 11:50:25.876487109 +0000 UTC m=+141.349516950" Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.879472 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8qsxn"] Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.885396 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.887056 5003 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:26.387035352 +0000 UTC m=+141.860065193 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.887351 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.887773 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:26.387762752 +0000 UTC m=+141.860792593 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:25 crc kubenswrapper[5003]: I0104 11:50:25.989138 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:25 crc kubenswrapper[5003]: E0104 11:50:25.989824 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:26.489797356 +0000 UTC m=+141.962827197 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.006115 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" podStartSLOduration=118.006092784 podStartE2EDuration="1m58.006092784s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:26.004110741 +0000 UTC m=+141.477140582" watchObservedRunningTime="2026-01-04 11:50:26.006092784 +0000 UTC m=+141.479122625" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.090946 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:26 crc kubenswrapper[5003]: E0104 11:50:26.091693 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:26.591672385 +0000 UTC m=+142.064702226 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.197839 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:26 crc kubenswrapper[5003]: E0104 11:50:26.198380 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:26.698356034 +0000 UTC m=+142.171385875 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.211374 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" podStartSLOduration=118.211332093 podStartE2EDuration="1m58.211332093s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:26.197982444 +0000 UTC m=+141.671012285" watchObservedRunningTime="2026-01-04 11:50:26.211332093 +0000 UTC m=+141.684361934" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.215040 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.229145 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pd2px"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.254170 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-pgx7w"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.299331 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:26 crc kubenswrapper[5003]: E0104 11:50:26.299924 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:26.799907335 +0000 UTC m=+142.272937176 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.340384 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-cw9t7" podStartSLOduration=118.340360663 podStartE2EDuration="1m58.340360663s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:26.340362033 +0000 UTC m=+141.813391884" watchObservedRunningTime="2026-01-04 11:50:26.340360663 +0000 UTC m=+141.813390514" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.357744 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.357807 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-bjp7f"] Jan 04 11:50:26 crc kubenswrapper[5003]: W0104 11:50:26.374245 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod508261e1_05b6_486d_9724_768d8729d7dd.slice/crio-9f43035db60d1592fe68106ea26bc027087b12fa928687cab121ce1b4018d9bb WatchSource:0}: Error finding container 9f43035db60d1592fe68106ea26bc027087b12fa928687cab121ce1b4018d9bb: Status 404 returned error can't find the container with id 9f43035db60d1592fe68106ea26bc027087b12fa928687cab121ce1b4018d9bb Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.377243 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.400140 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-87wfq"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.401233 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:26 crc kubenswrapper[5003]: E0104 11:50:26.402125 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:26.902001191 +0000 UTC m=+142.375031032 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.424752 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.496684 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.506297 5003 patch_prober.go:28] interesting pod/router-default-5444994796-dkgwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 04 11:50:26 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 04 11:50:26 crc kubenswrapper[5003]: [+]process-running ok Jan 04 11:50:26 crc kubenswrapper[5003]: healthz check failed Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.506385 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dkgwj" podUID="87bcf788-c637-4ed6-a5da-650979a40203" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.512157 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:26 crc kubenswrapper[5003]: E0104 11:50:26.512690 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.012677357 +0000 UTC m=+142.485707198 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.618056 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:26 crc kubenswrapper[5003]: E0104 11:50:26.619233 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.119211022 +0000 UTC m=+142.592240863 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.692937 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" event={"ID":"ef5fdbe3-490c-476b-af83-a810d6e5a888","Type":"ContainerStarted","Data":"40910bbe780a78b41e068b39c54072e773fd55291b81b457c1fd748a08d3e13f"} Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.703346 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" event={"ID":"e099f402-eb6b-4a1c-b5f6-f713fff68945","Type":"ContainerStarted","Data":"6ae03d6b687dbdffb82e4f12884eaf745a32720a761d5e49374586c8a85a2a35"} Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.713463 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" event={"ID":"b5684173-3dc4-40bb-8424-096e087c1afd","Type":"ContainerStarted","Data":"78744448557c6678d800ed322f0ee42b45d2c91d56eaabc4f560807550f262db"} Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.713523 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" event={"ID":"b5684173-3dc4-40bb-8424-096e087c1afd","Type":"ContainerStarted","Data":"123dd4462adc079013528098608ea6e3550c02475bff029b4e3847475ac3fa28"} Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.715693 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" event={"ID":"dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce","Type":"ContainerStarted","Data":"466cf1265b78cdf3f1e6d1df891beaa9a35d2c03ba30d602e215782dae111c08"} Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.717062 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" event={"ID":"cec0c8d3-e486-4b4c-8f49-9a04926b7f05","Type":"ContainerStarted","Data":"d654431fa6a0923f51917f14098af15a1a2754d1e94f1262c449ac40a2f49ab7"} Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.718328 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" event={"ID":"1886cfcf-db6d-49b2-8f0a-4637996373db","Type":"ContainerStarted","Data":"3c000948d035b41adac65c55c144463550140d98bc28c8edadd0f15491cbd8ca"} Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.719219 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" event={"ID":"a5f82de9-292f-47ab-9590-00f63a73a25d","Type":"ContainerStarted","Data":"6112db6b21f7fc5aef78948740bcc54f553eca82b2f1bfe420e26ba857afd283"} Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.720569 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:26 crc kubenswrapper[5003]: E0104 11:50:26.720978 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.220957129 +0000 UTC m=+142.693986970 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.747597 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn" event={"ID":"664dcbda-0324-4957-84e5-309cbd624afc","Type":"ContainerStarted","Data":"8ea1a09388d4a3ec0670f67934b2396e73abe5e06f79daf69f808e214a7627ff"} Jan 04 11:50:26 crc kubenswrapper[5003]: W0104 11:50:26.767282 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0cdb8ef_3e38_4f3c_8bb2_359da008a4ed.slice/crio-3261d99b7fd121f2fd289ce6ceb7e6bee4464407db04cd3bbcbbeb274246a2f4 WatchSource:0}: Error finding container 3261d99b7fd121f2fd289ce6ceb7e6bee4464407db04cd3bbcbbeb274246a2f4: Status 404 returned error can't find the container with id 3261d99b7fd121f2fd289ce6ceb7e6bee4464407db04cd3bbcbbeb274246a2f4 Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.800048 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" podStartSLOduration=119.800009774 podStartE2EDuration="1m59.800009774s" podCreationTimestamp="2026-01-04 11:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:26.38856458 
+0000 UTC m=+141.861594421" watchObservedRunningTime="2026-01-04 11:50:26.800009774 +0000 UTC m=+142.273039615" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.802870 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-r7kd8"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.828392 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:26 crc kubenswrapper[5003]: E0104 11:50:26.828640 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.328612594 +0000 UTC m=+142.801642435 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.829242 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:26 crc kubenswrapper[5003]: E0104 11:50:26.830488 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.330465814 +0000 UTC m=+142.803495655 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.857006 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-r6s9j" podStartSLOduration=119.856986147 podStartE2EDuration="1m59.856986147s" podCreationTimestamp="2026-01-04 11:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:26.485822215 +0000 UTC m=+141.958852056" watchObservedRunningTime="2026-01-04 11:50:26.856986147 +0000 UTC m=+142.330015978" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.867513 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" podStartSLOduration=118.867494289 podStartE2EDuration="1m58.867494289s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:26.626250912 +0000 UTC m=+142.099280763" watchObservedRunningTime="2026-01-04 11:50:26.867494289 +0000 UTC m=+142.340524130" Jan 04 11:50:26 crc kubenswrapper[5003]: W0104 11:50:26.869290 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62eb9818_98b0_40f2_9629_78923c5b112f.slice/crio-e1027a929d8e8796ed2e85d817114eeeb511e3e7c6f760e6cf032313d5568dae WatchSource:0}: Error finding container e1027a929d8e8796ed2e85d817114eeeb511e3e7c6f760e6cf032313d5568dae: Status 404 returned error can't find the container with id e1027a929d8e8796ed2e85d817114eeeb511e3e7c6f760e6cf032313d5568dae Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.875471 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" podStartSLOduration=118.875459054 podStartE2EDuration="1m58.875459054s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:26.680910342 +0000 UTC m=+142.153940183" watchObservedRunningTime="2026-01-04 11:50:26.875459054 +0000 UTC m=+142.348488895" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.885852 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.885901 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-m6xcl"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.885912 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.886593 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-fpmfh" podStartSLOduration=119.886581883 podStartE2EDuration="1m59.886581883s" podCreationTimestamp="2026-01-04 11:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:26.703461168 +0000 UTC m=+142.176491009" watchObservedRunningTime="2026-01-04 11:50:26.886581883 +0000 UTC m=+142.359611724" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.888370 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" podStartSLOduration=118.88836272 podStartE2EDuration="1m58.88836272s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:26.745434107 +0000 UTC m=+142.218463938" watchObservedRunningTime="2026-01-04 11:50:26.88836272 +0000 UTC m=+142.361392561" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.904429 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" event={"ID":"27951f81-2e39-4d40-a0f9-cd6f66265a41","Type":"ContainerStarted","Data":"db85fcc371336871db757948b3475fb6d218b428147a71570841b0494726d053"} Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.911468 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-x47v9"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.920402 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-dslqq" podStartSLOduration=118.920380221 podStartE2EDuration="1m58.920380221s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:26.793024037 +0000 UTC m=+142.266053878" watchObservedRunningTime="2026-01-04 11:50:26.920380221 +0000 UTC m=+142.393410062" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.921787 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5w6zp" podStartSLOduration=118.921778719 podStartE2EDuration="1m58.921778719s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:26.862123985 +0000 UTC m=+142.335153836" watchObservedRunningTime="2026-01-04 11:50:26.921778719 +0000 UTC m=+142.394808550" Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.922092 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n"] Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.933055 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:26 crc kubenswrapper[5003]: E0104 11:50:26.933420 5003 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.433385341 +0000 UTC m=+142.906415182 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.933631 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:26 crc kubenswrapper[5003]: E0104 11:50:26.934128 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.434120591 +0000 UTC m=+142.907150432 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.982379 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" event={"ID":"759c4131-a27e-4a35-b609-6a431eff05a6","Type":"ContainerStarted","Data":"e2edb7d3c22ae48b926864578640e81e137dea6995cb6b18caf29146be0c04ed"} Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.984355 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" event={"ID":"9f843c7a-a6a4-4d2d-976f-0a955e8a2777","Type":"ContainerStarted","Data":"ff37ca6170360edda7f035a704163b9a300a9244b0e4d6130de53cebd685a1b3"} Jan 04 11:50:26 crc kubenswrapper[5003]: I0104 11:50:26.989706 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" event={"ID":"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161","Type":"ContainerStarted","Data":"64e48e396660b7bfbd0cd82a1aa305cae53df26e50e975a555fa21da39a396fc"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.004231 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-cw9t7" event={"ID":"831746c3-7123-4189-a551-7f7852402807","Type":"ContainerStarted","Data":"5a83e2de9b7be0d5bde3c376c0674afe7cd17ff0a95da6daacd150e7e4957069"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.015182 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" event={"ID":"8d27792d-685d-4a24-96f6-e38710e1cd6c","Type":"ContainerStarted","Data":"c781d137848bf152f2e1ab9cfe8d92d662c58d21ab0173c3383c8a40f5cffd14"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.025189 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-psjfm" event={"ID":"e3d1074f-cb7e-4fa5-9477-e6c1835b6488","Type":"ContainerStarted","Data":"32354bc226e52fde0a2d7619c3442d71f7c4e0c5426816897739056bfdde349f"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.036035 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.036594 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.536573436 +0000 UTC m=+143.009603277 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.043967 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr" event={"ID":"56927c4a-a76e-4c0e-b2d4-2ff8cf31d8eb","Type":"ContainerStarted","Data":"e71f2fbee193b1a6c36156d60f9dc535dd982b495f67c74c6dddd7cc4394bef9"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.050769 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-vfq5w" event={"ID":"84121051-e231-4b58-af2f-eb4701f9876d","Type":"ContainerStarted","Data":"94e61e4f05d35c0d81d351353280a64481d284218fc65d716f3bbfe872a0e124"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.086654 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-dkgwj" event={"ID":"87bcf788-c637-4ed6-a5da-650979a40203","Type":"ContainerStarted","Data":"b7872b1cff5f6e15ab4b782ecb02a0586104693358bdad3849c1c9a83fcae03a"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.141785 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" event={"ID":"19e87eb5-dc5b-4012-b862-22c8083c247b","Type":"ContainerStarted","Data":"269c84662d8619f8ef0e4074808df84b7baf78f606a8842ceb4686d90ea6a4f2"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.149658 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.152855 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.652834333 +0000 UTC m=+143.125864374 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.170335 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" event={"ID":"8e5b9cf7-2752-4ac4-804a-7b88df12b435","Type":"ContainerStarted","Data":"10682506ba3b9c10feb82c542d533438cacd0d8ed64599bb56797553277d1488"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.198762 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" event={"ID":"26a96160-4284-44b2-89de-ea0b0e8b8a21","Type":"ContainerStarted","Data":"0c5f0c4fa0443b4ca9b9135adfdefafcbe13156fb2e771f87f073df6dde87634"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.227382 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" event={"ID":"5681e3b7-2931-4ebe-a12c-872ad9ab9906","Type":"ContainerStarted","Data":"c7a00428f49be9cf3c36cd417dae2f3c2ccfc1d7d907cad3c47b36f5acccbc06"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.229409 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.229397 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-vfq5w" podStartSLOduration=6.229383601 podStartE2EDuration="6.229383601s" podCreationTimestamp="2026-01-04 11:50:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:27.099900229 +0000 UTC m=+142.572930060" watchObservedRunningTime="2026-01-04 11:50:27.229383601 +0000 UTC m=+142.702413442" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.238439 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" podStartSLOduration=119.23824226 podStartE2EDuration="1m59.23824226s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:27.22708518 +0000 UTC m=+142.700115041" watchObservedRunningTime="2026-01-04 11:50:27.23824226 +0000 UTC m=+142.711272101" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.251615 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.251899 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.751864066 +0000 UTC m=+143.224893907 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.252445 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.252781 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.75277479 +0000 UTC m=+143.225804631 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.253742 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz" event={"ID":"c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a","Type":"ContainerStarted","Data":"e2578cdf0cc5ae80d14a3ee8eb06d515479768a1a9c8f7ac15cad6e5c35b46cb"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.278421 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" podStartSLOduration=119.27839863 podStartE2EDuration="1m59.27839863s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:27.274970807 +0000 UTC m=+142.748000648" watchObservedRunningTime="2026-01-04 11:50:27.27839863 +0000 UTC m=+142.751428461" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.282369 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-86rlm" event={"ID":"d7dccbf4-f7cc-4306-b244-605f2849a805","Type":"ContainerStarted","Data":"55b5a0f38d11bb5ed602536714a58143d102a1f0fd4899274ef597e60555773e"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.300268 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-nxp6d" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.313157 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2jt99" event={"ID":"b43f39ef-60cd-44be-8061-715fbf71a36b","Type":"ContainerStarted","Data":"63c8ee81a5000ef78c9c00b022f2b4b04b7a06dac08763d1177201e2bd0ab263"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.322753 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jkdzt" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.330969 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" event={"ID":"508261e1-05b6-486d-9724-768d8729d7dd","Type":"ContainerStarted","Data":"9f43035db60d1592fe68106ea26bc027087b12fa928687cab121ce1b4018d9bb"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.343203 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-pgx7w" event={"ID":"92ada4c8-acb9-4740-85dc-815cd8a3b028","Type":"ContainerStarted","Data":"c9c451792015e3f358694e60b586e92b51cbecf1d748619b846439a87cae2228"} Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.344513 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-pgx7w" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.347794 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-pgx7w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get 
\"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.347853 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-pgx7w" podUID="92ada4c8-acb9-4740-85dc-815cd8a3b028" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.353096 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.353479 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.853451048 +0000 UTC m=+143.326480889 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.356458 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.357341 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.857325492 +0000 UTC m=+143.330355323 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.378640 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz" podStartSLOduration=119.378619205 podStartE2EDuration="1m59.378619205s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:27.314734657 +0000 UTC m=+142.787764498" watchObservedRunningTime="2026-01-04 11:50:27.378619205 +0000 UTC m=+142.851649046" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.415138 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-2jt99" podStartSLOduration=119.415112996 podStartE2EDuration="1m59.415112996s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:27.387518494 +0000 UTC m=+142.860548335" watchObservedRunningTime="2026-01-04 11:50:27.415112996 +0000 UTC m=+142.888142837" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.461790 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.467999 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:27.967972708 +0000 UTC m=+143.441002549 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.491717 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-cw9t7" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.505754 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-pgx7w" podStartSLOduration=119.505733503 podStartE2EDuration="1m59.505733503s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:27.504295505 +0000 UTC m=+142.977325356" watchObservedRunningTime="2026-01-04 11:50:27.505733503 +0000 UTC m=+142.978763334" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.513298 5003 patch_prober.go:28] interesting pod/router-default-5444994796-dkgwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 04 11:50:27 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 04 11:50:27 crc kubenswrapper[5003]: [+]process-running ok Jan 04 11:50:27 crc kubenswrapper[5003]: healthz check failed Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.513372 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dkgwj" podUID="87bcf788-c637-4ed6-a5da-650979a40203" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.570651 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.572716 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.072696184 +0000 UTC m=+143.545726025 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.674963 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.675657 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.175630642 +0000 UTC m=+143.648660483 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.780455 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.780931 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.280917044 +0000 UTC m=+143.753946885 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.882406 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.882785 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.382752762 +0000 UTC m=+143.855782603 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.882868 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.883334 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.383317017 +0000 UTC m=+143.856346858 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:27 crc kubenswrapper[5003]: I0104 11:50:27.984272 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:27 crc kubenswrapper[5003]: E0104 11:50:27.984699 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.484680423 +0000 UTC m=+143.957710264 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.086234 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:28 crc kubenswrapper[5003]: E0104 11:50:28.086686 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.586662106 +0000 UTC m=+144.059691947 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.188087 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:28 crc kubenswrapper[5003]: E0104 11:50:28.188560 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.688540535 +0000 UTC m=+144.161570376 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.290649 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:28 crc kubenswrapper[5003]: E0104 11:50:28.291058 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.791038981 +0000 UTC m=+144.264068822 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.355719 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" event={"ID":"759c4131-a27e-4a35-b609-6a431eff05a6","Type":"ContainerStarted","Data":"0b3be4a27bfa32db504048e996798e29c43824a2d863efbf0d798ab535f0b8f7"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.360968 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr" event={"ID":"56927c4a-a76e-4c0e-b2d4-2ff8cf31d8eb","Type":"ContainerStarted","Data":"1c03f413448b53d3d7e4622a8317962b91c7351c7febb614660a97b7c3aee01a"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.361049 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr" event={"ID":"56927c4a-a76e-4c0e-b2d4-2ff8cf31d8eb","Type":"ContainerStarted","Data":"72cee5721398b48a4ce05faf4bc745002817fc221b5bf0debe38b931b2e1db67"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.362498 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" event={"ID":"a7865114-eb86-4994-b91d-3c39f9cee6a5","Type":"ContainerStarted","Data":"2f9dcc5a3c5b5a089578503c38630c69b09c51477905fb8d698558b09fbf2221"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.379619 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" event={"ID":"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161","Type":"ContainerStarted","Data":"ef022fa037a567bf08cfdc7290e8fe335416dc16787a5c98c98a76aed3328810"} Jan 04 11:50:28 crc kubenswrapper[5003]: E0104 11:50:28.394040 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.89399533 +0000 UTC m=+144.367025171 (durationBeforeRetry 500ms). 
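The two errors alternating through this stretch are one condition seen from both directions: the kubelet can neither MountDevice the image-registry pod's PVC nor TearDown the same volume for the departed pod 8f668bae-612b-4b75-9490-919e737c6a3b, because the CSI driver kubevirt.io.hostpath-provisioner has not yet registered with the kubelet's plugin registry. Each failed attempt re-arms a retry gate, which is what the nestedpendingoperations "No retries permitted until ... (durationBeforeRetry 500ms)" messages record. A minimal Go sketch of those two mechanisms, under assumed names (this is not kubelet's actual code):

package main

import (
	"errors"
	"fmt"
	"sync"
	"time"
)

// driverRegistry stands in for the kubelet's list of registered CSI plugins.
type driverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]struct{}
}

func (r *driverRegistry) lookup(name string) error {
	r.mu.RLock()
	defer r.mu.RUnlock()
	if _, ok := r.drivers[name]; !ok {
		// Same class of error as the log: the driver simply is not there yet.
		return fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return nil
}

// backoffGate refuses a retry until the previous failure's deadline passes,
// roughly what "No retries permitted until ... (durationBeforeRetry 500ms)" reports.
type backoffGate struct {
	notBefore time.Time
}

func (b *backoffGate) try(op func() error) error {
	if time.Now().Before(b.notBefore) {
		return errors.New("no retries permitted yet")
	}
	if err := op(); err != nil {
		b.notBefore = time.Now().Add(500 * time.Millisecond) // backoff seen in the log
		return err
	}
	return nil
}

func main() {
	reg := &driverRegistry{drivers: map[string]struct{}{}}
	gate := &backoffGate{}

	// Fails exactly like the log until the driver registers.
	fmt.Println(gate.try(func() error {
		return reg.lookup("kubevirt.io.hostpath-provisioner")
	}))

	// Driver registration (normally done via the plugin registration socket).
	reg.mu.Lock()
	reg.drivers["kubevirt.io.hostpath-provisioner"] = struct{}{}
	reg.mu.Unlock()

	time.Sleep(600 * time.Millisecond) // wait out the retry gate
	fmt.Println(gate.try(func() error {
		return reg.lookup("kubevirt.io.hostpath-provisioner")
	}))
}

Once the driver registers, the identical lookup succeeds and the pending mount and teardown complete on their next retry window; nothing else in the loop has to change.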
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.395417 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.395742 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:28 crc kubenswrapper[5003]: E0104 11:50:28.396283 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.896268771 +0000 UTC m=+144.369298612 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.397401 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" event={"ID":"a5f82de9-292f-47ab-9590-00f63a73a25d","Type":"ContainerStarted","Data":"53a72a1162faa69072087f992f7bfb098943928ffafe556063d571ebff89998e"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.398716 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-4k548" podStartSLOduration=120.398706377 podStartE2EDuration="2m0.398706377s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.398586063 +0000 UTC m=+143.871615904" watchObservedRunningTime="2026-01-04 11:50:28.398706377 +0000 UTC m=+143.871736218" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.452784 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" event={"ID":"cec0c8d3-e486-4b4c-8f49-9a04926b7f05","Type":"ContainerStarted","Data":"509f496ef9c343bd33b76550d67aa84eee86e9acafaf26ae9cf08871ede8dece"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.452832 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" event={"ID":"cec0c8d3-e486-4b4c-8f49-9a04926b7f05","Type":"ContainerStarted","Data":"a5e4463ed2f97993525489d9fed0692e68886304fd7bcb3a46ad78f1898a801d"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.452870 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.498238 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:28 crc kubenswrapper[5003]: E0104 11:50:28.499706 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:28.999683812 +0000 UTC m=+144.472713653 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.500276 5003 patch_prober.go:28] interesting pod/router-default-5444994796-dkgwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 04 11:50:28 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 04 11:50:28 crc kubenswrapper[5003]: [+]process-running ok Jan 04 11:50:28 crc kubenswrapper[5003]: healthz check failed Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.500333 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dkgwj" podUID="87bcf788-c637-4ed6-a5da-650979a40203" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.531326 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" event={"ID":"19e87eb5-dc5b-4012-b862-22c8083c247b","Type":"ContainerStarted","Data":"5688aee15a96a5d46151dd918764ebc0aedb6d23e17e31a1e47e53aa28ae8229"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.533797 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" podStartSLOduration=120.533783069 podStartE2EDuration="2m0.533783069s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.53305615 +0000 UTC m=+144.006085991" watchObservedRunningTime="2026-01-04 11:50:28.533783069 +0000 UTC m=+144.006812910" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.552238 5003 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zgmzz" event={"ID":"c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a","Type":"ContainerStarted","Data":"a0ab97f75280a756b73ad17149ed7b611ecb6287b3689458ae5145a66dae6f3a"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.558051 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" event={"ID":"f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed","Type":"ContainerStarted","Data":"9f29646984dab4deba7c731cfa01899381239edd3ec59f58c73c0c1866b64111"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.558110 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" event={"ID":"f0cdb8ef-3e38-4f3c-8bb2-359da008a4ed","Type":"ContainerStarted","Data":"3261d99b7fd121f2fd289ce6ceb7e6bee4464407db04cd3bbcbbeb274246a2f4"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.559911 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" event={"ID":"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1","Type":"ContainerStarted","Data":"7ee4f9577180ff79da5ab2a59244787d1b2071c759c15e9e7518f9a606e3afed"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.560647 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.566943 5003 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-m6xcl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/healthz\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.567030 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" podUID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.32:8080/healthz\": dial tcp 10.217.0.32:8080: connect: connection refused" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.573679 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-pgx7w" event={"ID":"92ada4c8-acb9-4740-85dc-815cd8a3b028","Type":"ContainerStarted","Data":"87ee296cfeed0dc14c8d8df7fba63f665fed581266acb94f7100a1029b5b4922"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.574985 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-pgx7w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.575062 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-pgx7w" podUID="92ada4c8-acb9-4740-85dc-815cd8a3b028" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.590294 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" 
event={"ID":"f69f13d6-7550-471c-b84e-e62b06c17c9b","Type":"ContainerStarted","Data":"000ae80fd806de6e211089be931dc4091b83578e9f20bcdecba53aa8bb5f98e3"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.600171 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:28 crc kubenswrapper[5003]: E0104 11:50:28.601536 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:29.101517971 +0000 UTC m=+144.574547812 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.602755 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-4m2tr" event={"ID":"8e5b9cf7-2752-4ac4-804a-7b88df12b435","Type":"ContainerStarted","Data":"024cb61be38c074c9e3da2f43c113712b804869762b81cc917028df6c8390c47"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.618743 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" event={"ID":"9ab7be4d-a4d6-4857-ba26-2ade2b6b3917","Type":"ContainerStarted","Data":"cad09d532788fa894a7377149362d0b7138b576609bb10aeac48700265d39364"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.618884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" event={"ID":"9ab7be4d-a4d6-4857-ba26-2ade2b6b3917","Type":"ContainerStarted","Data":"5b90746e4a29d16c33aa1ce16a152fb560b8827147d6bddbbb38658d2d279931"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.622419 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-r7kd8" event={"ID":"50f1b88a-1e58-4f1c-b720-d0ac61cb923c","Type":"ContainerStarted","Data":"34a9999f1a75b59f7c380f7c50f4d72bfed9e5b861b276d371d54f05d2ba6075"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.637031 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" event={"ID":"508261e1-05b6-486d-9724-768d8729d7dd","Type":"ContainerStarted","Data":"37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.637534 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.646583 5003 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-pd2px container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.41:6443/healthz\": dial tcp 
10.217.0.41:6443: connect: connection refused" start-of-body= Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.646663 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" podUID="508261e1-05b6-486d-9724-768d8729d7dd" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.41:6443/healthz\": dial tcp 10.217.0.41:6443: connect: connection refused" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.649276 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" event={"ID":"1886cfcf-db6d-49b2-8f0a-4637996373db","Type":"ContainerStarted","Data":"00707828a2f582e1f0ffd5a56dba21044e3b453f514e280d8f8df9f88e721644"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.665422 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" podStartSLOduration=121.665401159 podStartE2EDuration="2m1.665401159s" podCreationTimestamp="2026-01-04 11:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.608693774 +0000 UTC m=+144.081723625" watchObservedRunningTime="2026-01-04 11:50:28.665401159 +0000 UTC m=+144.138431000" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.665547 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn" event={"ID":"664dcbda-0324-4957-84e5-309cbd624afc","Type":"ContainerStarted","Data":"af2dd6af90fce751b3b76d10df279216e797fe8f5e5cfeb52f049f23a86c9a09"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.667352 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" podStartSLOduration=120.667345161 podStartE2EDuration="2m0.667345161s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.665686486 +0000 UTC m=+144.138716327" watchObservedRunningTime="2026-01-04 11:50:28.667345161 +0000 UTC m=+144.140375002" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.695449 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-q977v" podStartSLOduration=120.695427266 podStartE2EDuration="2m0.695427266s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.690286088 +0000 UTC m=+144.163315929" watchObservedRunningTime="2026-01-04 11:50:28.695427266 +0000 UTC m=+144.168457107" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.708653 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:28 crc kubenswrapper[5003]: E0104 11:50:28.710394 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:29.210371308 +0000 UTC m=+144.683401149 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.717865 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" event={"ID":"e099f402-eb6b-4a1c-b5f6-f713fff68945","Type":"ContainerStarted","Data":"20d7481f7814eab2b0f8c355f36e1741327ecdd49236842af9ac5327a88db395"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.762070 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" event={"ID":"27951f81-2e39-4d40-a0f9-cd6f66265a41","Type":"ContainerStarted","Data":"f3a00d1fd0012656cf01d36052eddbb9c3c5af46587f530e0ee3154e2ea4a280"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.787575 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" podStartSLOduration=120.787555054 podStartE2EDuration="2m0.787555054s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.728568087 +0000 UTC m=+144.201597928" watchObservedRunningTime="2026-01-04 11:50:28.787555054 +0000 UTC m=+144.260584905" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.789923 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-88l2c" podStartSLOduration=120.789912897 podStartE2EDuration="2m0.789912897s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.787325298 +0000 UTC m=+144.260355139" watchObservedRunningTime="2026-01-04 11:50:28.789912897 +0000 UTC m=+144.262942738" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.811142 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" podStartSLOduration=121.811123948 podStartE2EDuration="2m1.811123948s" podCreationTimestamp="2026-01-04 11:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.810646245 +0000 UTC m=+144.283676086" watchObservedRunningTime="2026-01-04 11:50:28.811123948 +0000 UTC m=+144.284153789" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.825811 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:28 crc kubenswrapper[5003]: E0104 11:50:28.826271 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:29.326254664 +0000 UTC m=+144.799284505 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.835548 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" event={"ID":"5271e5a8-5812-491f-86d3-8cd4504ef32f","Type":"ContainerStarted","Data":"fa4587799750cbee9ec0e8ecfc35c8006612f7041032d2dbb5835a68704f45b7"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.835798 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" event={"ID":"dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce","Type":"ContainerStarted","Data":"98229ec3fb7172ba923a188cc912bbf3d4be7cd0c43e4218d6c027c954a25e1c"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.869064 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-87wfq" event={"ID":"1571ddc2-e129-494e-a797-2a5ac2e94f94","Type":"ContainerStarted","Data":"a6b0438726989b679778e76dce4a624e0961df3222eb3cda7113e7aa4ad80cfd"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.869121 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-87wfq" event={"ID":"1571ddc2-e129-494e-a797-2a5ac2e94f94","Type":"ContainerStarted","Data":"c50f72fc630f4045a915f743021fea4eca02f13e066ee3515279b0385270e745"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.870170 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.893193 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" event={"ID":"62eb9818-98b0-40f2-9629-78923c5b112f","Type":"ContainerStarted","Data":"f795377417f758c426b50ad53d762edc6749c0ca053dfb5f0998bb4671488279"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.893238 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" event={"ID":"62eb9818-98b0-40f2-9629-78923c5b112f","Type":"ContainerStarted","Data":"e1027a929d8e8796ed2e85d817114eeeb511e3e7c6f760e6cf032313d5568dae"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.917780 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" podStartSLOduration=121.917746995 podStartE2EDuration="2m1.917746995s" podCreationTimestamp="2026-01-04 11:48:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.880631487 +0000 UTC 
m=+144.353661328" watchObservedRunningTime="2026-01-04 11:50:28.917746995 +0000 UTC m=+144.390776836" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.919480 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-x47v9" podStartSLOduration=120.919474201 podStartE2EDuration="2m0.919474201s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.918370782 +0000 UTC m=+144.391400623" watchObservedRunningTime="2026-01-04 11:50:28.919474201 +0000 UTC m=+144.392504042" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.930669 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.933544 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" event={"ID":"9f843c7a-a6a4-4d2d-976f-0a955e8a2777","Type":"ContainerStarted","Data":"7d0e3fb7a9f368063adb858feb2d40becc77171c5d2d866131a0d5932f9c90e5"} Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.933632 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:28 crc kubenswrapper[5003]: E0104 11:50:28.931725 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:29.43169216 +0000 UTC m=+144.904722011 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.938119 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:28 crc kubenswrapper[5003]: E0104 11:50:28.939561 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:29.439536191 +0000 UTC m=+144.912566032 (durationBeforeRetry 500ms). 
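The readiness failures interleaved here ("connect: connection refused" against 10.217.0.32:8080, 10.217.0.26:8080 and 10.217.0.41:6443) each land right after the matching ContainerStarted event, which is the common benign case: the process is running but has not bound its listener yet, so the kubelet's periodic HTTP GET fails until the socket opens. A small sketch of such a probe, covering both failure modes this log shows (a refused connection, and a non-2xx status with the start of the body echoed back); the function name and the 1s timeout are assumptions:

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// probeHTTP mimics a kubelet-style HTTP probe: any transport error or a
// status outside 200-399 counts as a failure, with a short detail string.
func probeHTTP(url string) (ok bool, detail string) {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		// e.g. "dial tcp 10.217.0.32:8080: connect: connection refused"
		return false, err.Error()
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 1024))
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		// e.g. "HTTP probe failed with statuscode: 500", start of body attached
		return false, fmt.Sprintf("HTTP probe failed with statuscode: %d, start-of-body: %s",
			resp.StatusCode, body)
	}
	return true, ""
}

func main() {
	// Address taken from the log; run anywhere else, this simply demonstrates
	// the connection-refused failure mode.
	ok, detail := probeHTTP("http://10.217.0.32:8080/healthz")
	fmt.Println(ok, detail)
}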
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.954969 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-r7kd8" podStartSLOduration=7.954949255 podStartE2EDuration="7.954949255s" podCreationTimestamp="2026-01-04 11:50:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.953546528 +0000 UTC m=+144.426576379" watchObservedRunningTime="2026-01-04 11:50:28.954949255 +0000 UTC m=+144.427979096" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.958958 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" Jan 04 11:50:28 crc kubenswrapper[5003]: I0104 11:50:28.986985 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bsmnf" podStartSLOduration=120.986966716 podStartE2EDuration="2m0.986966716s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:28.985656061 +0000 UTC m=+144.458685902" watchObservedRunningTime="2026-01-04 11:50:28.986966716 +0000 UTC m=+144.459996557" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.040699 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.043411 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" podStartSLOduration=121.043390164 podStartE2EDuration="2m1.043390164s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:29.016240124 +0000 UTC m=+144.489269965" watchObservedRunningTime="2026-01-04 11:50:29.043390164 +0000 UTC m=+144.516420005" Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.049198 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:29.549161749 +0000 UTC m=+145.022191590 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.073100 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-t6nrd" podStartSLOduration=121.073080632 podStartE2EDuration="2m1.073080632s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:29.072143687 +0000 UTC m=+144.545173528" watchObservedRunningTime="2026-01-04 11:50:29.073080632 +0000 UTC m=+144.546110483" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.142723 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-87wfq" podStartSLOduration=8.142691794 podStartE2EDuration="8.142691794s" podCreationTimestamp="2026-01-04 11:50:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:29.141910973 +0000 UTC m=+144.614940804" watchObservedRunningTime="2026-01-04 11:50:29.142691794 +0000 UTC m=+144.615721625" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.143101 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" podStartSLOduration=121.143096725 podStartE2EDuration="2m1.143096725s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:29.103385867 +0000 UTC m=+144.576415708" watchObservedRunningTime="2026-01-04 11:50:29.143096725 +0000 UTC m=+144.616126566" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.150536 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.150929 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:29.650914465 +0000 UTC m=+145.123944306 (durationBeforeRetry 500ms). 
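The pod_startup_latency_tracker lines are bookkeeping rather than errors: podStartSLOduration is, in effect, the time from podCreationTimestamp to the moment the pod is observed running, minus any image-pull time; the zeroed pull timestamps ("0001-01-01 00:00:00") mean no pull was observed, so nothing is subtracted here. The ~2m0.4s figures match pods created at 11:48:27-28 and observed running at 11:50:28-29. A quick check of that arithmetic, with times copied from the log (the tracker takes its own clock reading, so the final digits differ slightly):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Values copied from the openshift-controller-manager-operator entry above.
	created, _ := time.Parse(time.RFC3339, "2026-01-04T11:48:28Z")
	running, _ := time.Parse(time.RFC3339Nano, "2026-01-04T11:50:28.398586063Z")
	// Prints 2m0.398586063s, in line with podStartSLOduration=120.398706377
	// (the tracker reads its own clock an instant later).
	fmt.Println(running.Sub(created))
}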
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.252685 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.253681 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:29.753662899 +0000 UTC m=+145.226692740 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.354742 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.355254 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:29.85524064 +0000 UTC m=+145.328270481 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.442406 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gjbcx"] Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.454136 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.456493 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.457197 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:29.957167681 +0000 UTC m=+145.430197522 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.466664 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.473598 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gjbcx"] Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.509263 5003 patch_prober.go:28] interesting pod/router-default-5444994796-dkgwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 04 11:50:29 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 04 11:50:29 crc kubenswrapper[5003]: [+]process-running ok Jan 04 11:50:29 crc kubenswrapper[5003]: healthz check failed Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.509331 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dkgwj" podUID="87bcf788-c637-4ed6-a5da-650979a40203" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.558769 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-utilities\") pod \"community-operators-gjbcx\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.558851 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.559195 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-catalog-content\") pod \"community-operators-gjbcx\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.559216 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.059201705 +0000 UTC m=+145.532231546 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.559401 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d975\" (UniqueName: \"kubernetes.io/projected/6c5933bf-668f-4062-998b-3b2c5ad3a811-kube-api-access-4d975\") pod \"community-operators-gjbcx\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.642426 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mxr5x"] Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.643796 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.648536 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.660647 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mxr5x"] Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.661802 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.662042 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.16200475 +0000 UTC m=+145.635034591 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.662459 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-catalog-content\") pod \"community-operators-gjbcx\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.662585 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d975\" (UniqueName: \"kubernetes.io/projected/6c5933bf-668f-4062-998b-3b2c5ad3a811-kube-api-access-4d975\") pod \"community-operators-gjbcx\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.662737 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-utilities\") pod \"community-operators-gjbcx\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.662867 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.662943 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-catalog-content\") pod \"community-operators-gjbcx\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.663310 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-utilities\") pod \"community-operators-gjbcx\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.663345 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.163327205 +0000 UTC m=+145.636357046 (durationBeforeRetry 500ms). 
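The volume traffic for the new marketplace catalog pods shows the reconciler's fast path beside the stuck CSI one: VerifyControllerAttachedVolume is followed by MountVolume for each declared volume, the ephemeral ones (the empty-dir "utilities" and "catalog-content", the projected service-account token) report "MountVolume.SetUp succeeded" immediately, and only the CSI-backed PVC keeps failing as before. A toy desired-versus-actual reconcile loop in that spirit (assumed names and shapes, not the kubelet's reconciler):

package main

import "fmt"

// desired: volumes the pod manifests ask for; actual: what is mounted now.
type volumeState struct {
	desired map[string]bool
	actual  map[string]bool
}

func (s *volumeState) reconcile(mount func(string) error) {
	for name := range s.desired {
		if s.actual[name] {
			continue // already mounted, nothing to do
		}
		fmt.Printf("operationExecutor.MountVolume started for volume %q\n", name)
		if err := mount(name); err != nil {
			fmt.Printf("MountVolume failed for %q: %v (retried next sync)\n", name, err)
			continue
		}
		s.actual[name] = true
		fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", name)
	}
}

func main() {
	s := &volumeState{
		desired: map[string]bool{"utilities": true, "catalog-content": true, "pvc-657094db": true},
		actual:  map[string]bool{},
	}
	s.reconcile(func(name string) error {
		if name == "pvc-657094db" { // CSI-backed volume, driver not registered yet
			return fmt.Errorf("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers")
		}
		return nil // empty-dir and projected volumes mount immediately
	})
}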
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.707865 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d975\" (UniqueName: \"kubernetes.io/projected/6c5933bf-668f-4062-998b-3b2c5ad3a811-kube-api-access-4d975\") pod \"community-operators-gjbcx\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.764289 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.764546 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htccq\" (UniqueName: \"kubernetes.io/projected/b4141980-5ab0-4976-81aa-80a2245ae245-kube-api-access-htccq\") pod \"certified-operators-mxr5x\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.764688 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-utilities\") pod \"certified-operators-mxr5x\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.764741 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-catalog-content\") pod \"certified-operators-mxr5x\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.764906 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.264881317 +0000 UTC m=+145.737911168 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.782625 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.830035 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-984zr"] Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.831948 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-984zr" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.853403 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-984zr"] Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.865842 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-utilities\") pod \"certified-operators-mxr5x\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.865897 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-catalog-content\") pod \"certified-operators-mxr5x\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.865933 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htccq\" (UniqueName: \"kubernetes.io/projected/b4141980-5ab0-4976-81aa-80a2245ae245-kube-api-access-htccq\") pod \"certified-operators-mxr5x\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.865971 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.866286 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.366273223 +0000 UTC m=+145.839303064 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.866515 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-catalog-content\") pod \"certified-operators-mxr5x\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.866763 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-utilities\") pod \"certified-operators-mxr5x\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.925729 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htccq\" (UniqueName: \"kubernetes.io/projected/b4141980-5ab0-4976-81aa-80a2245ae245-kube-api-access-htccq\") pod \"certified-operators-mxr5x\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.961994 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.968459 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.968643 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-catalog-content\") pod \"community-operators-984zr\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " pod="openshift-marketplace/community-operators-984zr" Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.968683 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.468642046 +0000 UTC m=+145.941671887 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.968751 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcnfb\" (UniqueName: \"kubernetes.io/projected/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-kube-api-access-qcnfb\") pod \"community-operators-984zr\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " pod="openshift-marketplace/community-operators-984zr" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.968950 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.969078 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-utilities\") pod \"community-operators-984zr\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " pod="openshift-marketplace/community-operators-984zr" Jan 04 11:50:29 crc kubenswrapper[5003]: E0104 11:50:29.969562 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.46953545 +0000 UTC m=+145.942565291 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:29 crc kubenswrapper[5003]: I0104 11:50:29.970131 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-87wfq" event={"ID":"1571ddc2-e129-494e-a797-2a5ac2e94f94","Type":"ContainerStarted","Data":"265aa9983fce5d8bb08bed4edbf0105df71a4927220b6f1ea618a82740859fd0"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.015068 5003 generic.go:334] "Generic (PLEG): container finished" podID="62eb9818-98b0-40f2-9629-78923c5b112f" containerID="f795377417f758c426b50ad53d762edc6749c0ca053dfb5f0998bb4671488279" exitCode=0 Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.015142 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" event={"ID":"62eb9818-98b0-40f2-9629-78923c5b112f","Type":"ContainerDied","Data":"f795377417f758c426b50ad53d762edc6749c0ca053dfb5f0998bb4671488279"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.015177 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" event={"ID":"62eb9818-98b0-40f2-9629-78923c5b112f","Type":"ContainerStarted","Data":"862458dc791d8668e28d5bd403f9fc6c56d2c0700316bdc8870573664b69fa5c"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.016663 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.041644 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hsgvz"] Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.050836 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.060711 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hsgvz"] Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.062998 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" event={"ID":"ca95e54c-a1d3-40a8-9bb4-fa314e9fe161","Type":"ContainerStarted","Data":"0ac7a7c95f8843f372433321bcba42edb7563cf19a4172b34d237c67d88b75e1"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.073699 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.074101 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-utilities\") pod \"community-operators-984zr\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " pod="openshift-marketplace/community-operators-984zr" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.074189 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-catalog-content\") pod \"community-operators-984zr\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " pod="openshift-marketplace/community-operators-984zr" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.074231 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcnfb\" (UniqueName: \"kubernetes.io/projected/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-kube-api-access-qcnfb\") pod \"community-operators-984zr\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " pod="openshift-marketplace/community-operators-984zr" Jan 04 11:50:30 crc kubenswrapper[5003]: E0104 11:50:30.074930 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.574908964 +0000 UTC m=+146.047938805 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.075620 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-utilities\") pod \"community-operators-984zr\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " pod="openshift-marketplace/community-operators-984zr" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.075995 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-catalog-content\") pod \"community-operators-984zr\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " pod="openshift-marketplace/community-operators-984zr" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.091737 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" podStartSLOduration=122.091701636 podStartE2EDuration="2m2.091701636s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:30.071147823 +0000 UTC m=+145.544177664" watchObservedRunningTime="2026-01-04 11:50:30.091701636 +0000 UTC m=+145.564731487" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.121572 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-stxr8" podStartSLOduration=122.121542398 podStartE2EDuration="2m2.121542398s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:30.105478076 +0000 UTC m=+145.578507927" watchObservedRunningTime="2026-01-04 11:50:30.121542398 +0000 UTC m=+145.594572239" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.126213 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn" event={"ID":"664dcbda-0324-4957-84e5-309cbd624afc","Type":"ContainerStarted","Data":"3e236012bbc4d8d4ab7b988352b9efb5957e91d988dee153d473dd9dcbedb13b"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.142441 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcnfb\" (UniqueName: \"kubernetes.io/projected/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-kube-api-access-qcnfb\") pod \"community-operators-984zr\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " pod="openshift-marketplace/community-operators-984zr" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.160300 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" event={"ID":"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1","Type":"ContainerStarted","Data":"eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.161658 
5003 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-m6xcl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/healthz\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.161695 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" podUID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.32:8080/healthz\": dial tcp 10.217.0.32:8080: connect: connection refused" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.169209 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-984zr" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.177238 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-catalog-content\") pod \"certified-operators-hsgvz\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.177362 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-utilities\") pod \"certified-operators-hsgvz\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.177471 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.177590 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c227h\" (UniqueName: \"kubernetes.io/projected/7aedb7cd-d9d3-441b-af72-a11878071f3f-kube-api-access-c227h\") pod \"certified-operators-hsgvz\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:50:30 crc kubenswrapper[5003]: E0104 11:50:30.179368 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.679353393 +0000 UTC m=+146.152383234 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.218229 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-r7kd8" event={"ID":"50f1b88a-1e58-4f1c-b720-d0ac61cb923c","Type":"ContainerStarted","Data":"f14101081bec3ff6d224b4f7049ba7f894e623527e26317e16328f86a5d2be3a"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.233566 5003 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.280603 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.280975 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-catalog-content\") pod \"certified-operators-hsgvz\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.281216 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-utilities\") pod \"certified-operators-hsgvz\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.281301 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c227h\" (UniqueName: \"kubernetes.io/projected/7aedb7cd-d9d3-441b-af72-a11878071f3f-kube-api-access-c227h\") pod \"certified-operators-hsgvz\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.282500 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-db64n" event={"ID":"5271e5a8-5812-491f-86d3-8cd4504ef32f","Type":"ContainerStarted","Data":"cb538d5e86a211bcd5dfd1f0d3400d0283d26fe29cb8679e25f116a4558d70c5"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.283299 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-catalog-content\") pod \"certified-operators-hsgvz\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:50:30 crc kubenswrapper[5003]: E0104 11:50:30.283800 5003 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.783773471 +0000 UTC m=+146.256803492 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.284208 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-utilities\") pod \"certified-operators-hsgvz\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.285367 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-8qsxn" podStartSLOduration=122.285354703 podStartE2EDuration="2m2.285354703s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:30.200595294 +0000 UTC m=+145.673625135" watchObservedRunningTime="2026-01-04 11:50:30.285354703 +0000 UTC m=+145.758384544" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.285722 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gjbcx"] Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.299216 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" event={"ID":"a5f82de9-292f-47ab-9590-00f63a73a25d","Type":"ContainerStarted","Data":"81c592ed133d8cbc78c62de97fd99c76171a443d7efbd1e4ee4bdf731927c910"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.325860 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c227h\" (UniqueName: \"kubernetes.io/projected/7aedb7cd-d9d3-441b-af72-a11878071f3f-kube-api-access-c227h\") pod \"certified-operators-hsgvz\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.356280 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-dphl6" podStartSLOduration=122.35625708 podStartE2EDuration="2m2.35625708s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:30.354052921 +0000 UTC m=+145.827082762" watchObservedRunningTime="2026-01-04 11:50:30.35625708 +0000 UTC m=+145.829286931" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.386807 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: 
\"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.389709 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" event={"ID":"dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce","Type":"ContainerStarted","Data":"4cba335ecdcfebfec4c57620d657845b2c68fb685ec070eb7a34be1054ec8cfe"} Jan 04 11:50:30 crc kubenswrapper[5003]: E0104 11:50:30.397521 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.897487459 +0000 UTC m=+146.370517300 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.409832 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" event={"ID":"26a96160-4284-44b2-89de-ea0b0e8b8a21","Type":"ContainerStarted","Data":"c14ae3c1cb53bea363d22dfdc12b48a9cbf4ba1d7161e9a11b166479e9512995"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.409887 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" event={"ID":"26a96160-4284-44b2-89de-ea0b0e8b8a21","Type":"ContainerStarted","Data":"23dbeae512feed53ef89e17123eb27602ce0975947d82c19395019ee4b9599ff"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.425441 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" event={"ID":"a7865114-eb86-4994-b91d-3c39f9cee6a5","Type":"ContainerStarted","Data":"e48ac8f3244c1c8c939c5aa0503e16591e120594419a80ffed626c735193112d"} Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.436232 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-pgx7w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.436316 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-pgx7w" podUID="92ada4c8-acb9-4740-85dc-815cd8a3b028" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.444993 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.446102 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.467575 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-6smzv" podStartSLOduration=122.467550923 podStartE2EDuration="2m2.467550923s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:30.452780976 +0000 UTC m=+145.925810807" watchObservedRunningTime="2026-01-04 11:50:30.467550923 +0000 UTC m=+145.940580764" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.474155 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mxr5x"] Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.498738 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:30 crc kubenswrapper[5003]: E0104 11:50:30.499104 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:30.999075931 +0000 UTC m=+146.472105772 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.499293 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.499835 5003 patch_prober.go:28] interesting pod/router-default-5444994796-dkgwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 04 11:50:30 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 04 11:50:30 crc kubenswrapper[5003]: [+]process-running ok Jan 04 11:50:30 crc kubenswrapper[5003]: healthz check failed Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.499890 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dkgwj" podUID="87bcf788-c637-4ed6-a5da-650979a40203" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 04 11:50:30 crc kubenswrapper[5003]: E0104 11:50:30.500592 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:31.000579161 +0000 UTC m=+146.473608992 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.536147 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-b5flr" podStartSLOduration=122.536124937 podStartE2EDuration="2m2.536124937s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:30.487662844 +0000 UTC m=+145.960692695" watchObservedRunningTime="2026-01-04 11:50:30.536124937 +0000 UTC m=+146.009154778" Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.605806 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:30 crc kubenswrapper[5003]: E0104 11:50:30.606921 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:31.106901701 +0000 UTC m=+146.579931542 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.713320 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:30 crc kubenswrapper[5003]: E0104 11:50:30.713912 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:31.213891058 +0000 UTC m=+146.686920889 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.821153 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:30 crc kubenswrapper[5003]: E0104 11:50:30.821792 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-04 11:50:31.321769789 +0000 UTC m=+146.794799630 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.907353 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hsgvz"] Jan 04 11:50:30 crc kubenswrapper[5003]: I0104 11:50:30.924173 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:30 crc kubenswrapper[5003]: E0104 11:50:30.924612 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-04 11:50:31.424596144 +0000 UTC m=+146.897625985 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-d2kjq" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.009236 5003 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-04T11:50:30.233874949Z","Handler":null,"Name":""} Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.013842 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-984zr"] Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.017945 5003 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.017986 5003 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.025849 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.037111 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.127512 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.141203 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.141272 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.176491 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-d2kjq\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.223215 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.439787 5003 generic.go:334] "Generic (PLEG): container finished" podID="b4141980-5ab0-4976-81aa-80a2245ae245" containerID="f5048e0ad3480bc9ae224ba2856a499ea7b1d444ef5354f53f40c842192c1030" exitCode=0 Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.439916 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxr5x" event={"ID":"b4141980-5ab0-4976-81aa-80a2245ae245","Type":"ContainerDied","Data":"f5048e0ad3480bc9ae224ba2856a499ea7b1d444ef5354f53f40c842192c1030"} Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.439968 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxr5x" event={"ID":"b4141980-5ab0-4976-81aa-80a2245ae245","Type":"ContainerStarted","Data":"fa6edab9782fc6034e67a540a2aacc5f26263ac614cc4a28eb01ad30a6c0ff85"} Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.442953 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.456894 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" event={"ID":"26a96160-4284-44b2-89de-ea0b0e8b8a21","Type":"ContainerStarted","Data":"026feec7e80d1ae713214f0cb1305d577731689d0ccd42077a6c758ded053328"} Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.456957 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" event={"ID":"26a96160-4284-44b2-89de-ea0b0e8b8a21","Type":"ContainerStarted","Data":"2714cf60c117f2caef4592127a49c47eb6e1e24314cf6ceb1b8f7a5288918678"} Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.459387 5003 generic.go:334] "Generic (PLEG): container finished" podID="7aedb7cd-d9d3-441b-af72-a11878071f3f" containerID="ad8a0adcfae03e07cb6127b223b5dd07a3c3ccf629910df1c345e40b0d19f186" exitCode=0 Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.459474 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsgvz" 
event={"ID":"7aedb7cd-d9d3-441b-af72-a11878071f3f","Type":"ContainerDied","Data":"ad8a0adcfae03e07cb6127b223b5dd07a3c3ccf629910df1c345e40b0d19f186"} Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.459513 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsgvz" event={"ID":"7aedb7cd-d9d3-441b-af72-a11878071f3f","Type":"ContainerStarted","Data":"b09d955118e43a57b263d8e510d2ae6008d29dac544ab2c9c9383aedc8717fca"} Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.465857 5003 generic.go:334] "Generic (PLEG): container finished" podID="6c5933bf-668f-4062-998b-3b2c5ad3a811" containerID="6f9a684eb697ec549e409cdf6475b17429be57144e4bb92eec576de2a2558240" exitCode=0 Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.466159 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjbcx" event={"ID":"6c5933bf-668f-4062-998b-3b2c5ad3a811","Type":"ContainerDied","Data":"6f9a684eb697ec549e409cdf6475b17429be57144e4bb92eec576de2a2558240"} Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.466210 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjbcx" event={"ID":"6c5933bf-668f-4062-998b-3b2c5ad3a811","Type":"ContainerStarted","Data":"a7e21f8e1266e0bd14e8e195ea5a9cc5166348407feaa6d60aaf96bdb65b37ad"} Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.480092 5003 generic.go:334] "Generic (PLEG): container finished" podID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" containerID="1c5a6007e5547e4070348ab72205ec377b3baf6023c8434eb672011cc07ec99b" exitCode=0 Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.482084 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-984zr" event={"ID":"43dd1701-8159-424a-b98b-c3dd5f2b9ad8","Type":"ContainerDied","Data":"1c5a6007e5547e4070348ab72205ec377b3baf6023c8434eb672011cc07ec99b"} Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.482131 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-984zr" event={"ID":"43dd1701-8159-424a-b98b-c3dd5f2b9ad8","Type":"ContainerStarted","Data":"0fd5f32bb6da7798069bba37ff4ed52560fe10d867142299b1a89a09a9375782"} Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.488165 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d2kjq"] Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.488385 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.495677 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-bjp7f" podStartSLOduration=11.495662531 podStartE2EDuration="11.495662531s" podCreationTimestamp="2026-01-04 11:50:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:31.494617373 +0000 UTC m=+146.967647224" watchObservedRunningTime="2026-01-04 11:50:31.495662531 +0000 UTC m=+146.968692372" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.503375 5003 patch_prober.go:28] interesting pod/router-default-5444994796-dkgwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 
04 11:50:31 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 04 11:50:31 crc kubenswrapper[5003]: [+]process-running ok Jan 04 11:50:31 crc kubenswrapper[5003]: healthz check failed Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.503423 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dkgwj" podUID="87bcf788-c637-4ed6-a5da-650979a40203" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.640028 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7ghcn"] Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.641124 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.648567 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.679478 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ghcn"] Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.747649 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-catalog-content\") pod \"redhat-marketplace-7ghcn\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.747713 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-utilities\") pod \"redhat-marketplace-7ghcn\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.747738 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbgtq\" (UniqueName: \"kubernetes.io/projected/cf4075fd-a261-4d15-b6e3-02d8c346fe74-kube-api-access-xbgtq\") pod \"redhat-marketplace-7ghcn\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.848696 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-utilities\") pod \"redhat-marketplace-7ghcn\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.848754 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbgtq\" (UniqueName: \"kubernetes.io/projected/cf4075fd-a261-4d15-b6e3-02d8c346fe74-kube-api-access-xbgtq\") pod \"redhat-marketplace-7ghcn\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.848842 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") 
pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.848948 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.848971 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-catalog-content\") pod \"redhat-marketplace-7ghcn\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.849895 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-catalog-content\") pod \"redhat-marketplace-7ghcn\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.850786 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-utilities\") pod \"redhat-marketplace-7ghcn\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.857446 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.858861 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.876379 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbgtq\" (UniqueName: \"kubernetes.io/projected/cf4075fd-a261-4d15-b6e3-02d8c346fe74-kube-api-access-xbgtq\") pod \"redhat-marketplace-7ghcn\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.950738 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.950900 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.955000 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.955886 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:31 crc kubenswrapper[5003]: I0104 11:50:31.977711 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.027524 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-87zs8"] Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.034694 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.035003 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.040164 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-87zs8"] Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.048653 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.073104 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.154159 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-utilities\") pod \"redhat-marketplace-87zs8\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.154219 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-catalog-content\") pod \"redhat-marketplace-87zs8\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.154332 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv7d7\" (UniqueName: \"kubernetes.io/projected/c02a9169-294b-4986-823b-e77965bc257d-kube-api-access-qv7d7\") pod \"redhat-marketplace-87zs8\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.256581 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-catalog-content\") pod \"redhat-marketplace-87zs8\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.257220 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv7d7\" (UniqueName: \"kubernetes.io/projected/c02a9169-294b-4986-823b-e77965bc257d-kube-api-access-qv7d7\") pod \"redhat-marketplace-87zs8\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.257257 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-utilities\") pod \"redhat-marketplace-87zs8\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.259665 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-utilities\") pod \"redhat-marketplace-87zs8\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.263499 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-catalog-content\") pod \"redhat-marketplace-87zs8\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.279667 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv7d7\" (UniqueName: \"kubernetes.io/projected/c02a9169-294b-4986-823b-e77965bc257d-kube-api-access-qv7d7\") 
pod \"redhat-marketplace-87zs8\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.411123 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.457359 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ghcn"] Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.501971 5003 patch_prober.go:28] interesting pod/router-default-5444994796-dkgwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 04 11:50:32 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 04 11:50:32 crc kubenswrapper[5003]: [+]process-running ok Jan 04 11:50:32 crc kubenswrapper[5003]: healthz check failed Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.502747 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dkgwj" podUID="87bcf788-c637-4ed6-a5da-650979a40203" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.546583 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"0fe7c04caf47f86da4a3eb02b0e4b1264ee388bb947394ce26a745454c1e5614"} Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.556495 5003 generic.go:334] "Generic (PLEG): container finished" podID="1886cfcf-db6d-49b2-8f0a-4637996373db" containerID="00707828a2f582e1f0ffd5a56dba21044e3b453f514e280d8f8df9f88e721644" exitCode=0 Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.556568 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" event={"ID":"1886cfcf-db6d-49b2-8f0a-4637996373db","Type":"ContainerDied","Data":"00707828a2f582e1f0ffd5a56dba21044e3b453f514e280d8f8df9f88e721644"} Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.561698 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" event={"ID":"edc8a447-cc41-4241-be6a-957fa4255108","Type":"ContainerStarted","Data":"006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce"} Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.561737 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.561749 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" event={"ID":"edc8a447-cc41-4241-be6a-957fa4255108","Type":"ContainerStarted","Data":"9a835883d2863c3efe8cba0e6ea21d17af5c8eec58e6c0d4bcffd669d93ec8ff"} Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.578134 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cd6dv" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.624319 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" 
podStartSLOduration=124.624263711 podStartE2EDuration="2m4.624263711s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:32.602973418 +0000 UTC m=+148.076003269" watchObservedRunningTime="2026-01-04 11:50:32.624263711 +0000 UTC m=+148.097293552" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.634573 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9t6r8"] Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.636361 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.638195 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.667548 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9t6r8"] Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.763518 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-utilities\") pod \"redhat-operators-9t6r8\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.763925 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-catalog-content\") pod \"redhat-operators-9t6r8\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.763972 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kjrn\" (UniqueName: \"kubernetes.io/projected/527e50de-5345-49f4-9ce8-3aaf8d446bed-kube-api-access-6kjrn\") pod \"redhat-operators-9t6r8\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.774667 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:32 crc kubenswrapper[5003]: I0104 11:50:32.774720 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.033304 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-v47x4"] Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.034406 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.038055 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v47x4"] Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.322525 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.386596 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kjrn\" (UniqueName: \"kubernetes.io/projected/527e50de-5345-49f4-9ce8-3aaf8d446bed-kube-api-access-6kjrn\") pod \"redhat-operators-9t6r8\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.386723 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-utilities\") pod \"redhat-operators-v47x4\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.386749 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-565jz\" (UniqueName: \"kubernetes.io/projected/223eef6f-9ab2-48c7-9d00-c812abc17e96-kube-api-access-565jz\") pod \"redhat-operators-v47x4\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.386861 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-catalog-content\") pod \"redhat-operators-v47x4\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.386894 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-catalog-content\") pod \"redhat-operators-9t6r8\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.386915 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-utilities\") pod \"redhat-operators-9t6r8\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.387457 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-utilities\") pod \"redhat-operators-9t6r8\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.387498 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.388377 5003 patch_prober.go:28] interesting pod/console-f9d7485db-2jt99 container/console 
namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.388428 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-2jt99" podUID="b43f39ef-60cd-44be-8061-715fbf71a36b" containerName="console" probeResult="failure" output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.390730 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-catalog-content\") pod \"redhat-operators-9t6r8\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.392486 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.392520 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.405044 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.405933 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.416952 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kjrn\" (UniqueName: \"kubernetes.io/projected/527e50de-5345-49f4-9ce8-3aaf8d446bed-kube-api-access-6kjrn\") pod \"redhat-operators-9t6r8\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.495813 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.497400 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-utilities\") pod \"redhat-operators-v47x4\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.505120 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-565jz\" (UniqueName: \"kubernetes.io/projected/223eef6f-9ab2-48c7-9d00-c812abc17e96-kube-api-access-565jz\") pod \"redhat-operators-v47x4\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.508687 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-catalog-content\") pod \"redhat-operators-v47x4\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:50:33 crc 
kubenswrapper[5003]: I0104 11:50:33.506550 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-utilities\") pod \"redhat-operators-v47x4\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.510434 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-catalog-content\") pod \"redhat-operators-v47x4\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.511108 5003 patch_prober.go:28] interesting pod/router-default-5444994796-dkgwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 04 11:50:33 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 04 11:50:33 crc kubenswrapper[5003]: [+]process-running ok Jan 04 11:50:33 crc kubenswrapper[5003]: healthz check failed Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.511842 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dkgwj" podUID="87bcf788-c637-4ed6-a5da-650979a40203" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.522513 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.539965 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-565jz\" (UniqueName: \"kubernetes.io/projected/223eef6f-9ab2-48c7-9d00-c812abc17e96-kube-api-access-565jz\") pod \"redhat-operators-v47x4\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.585544 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.621783 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ghcn" event={"ID":"cf4075fd-a261-4d15-b6e3-02d8c346fe74","Type":"ContainerStarted","Data":"4ae870a1d1a128e5e6790914ad76aa86367e90369f3a4898901411bab9da3a34"} Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.624003 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ad0e995c9f6c0ce04dd0e11e60388d96ba03367024a043e8efc32bcfc7bfe3fb"} Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.626817 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"7d9b3c2c7306129b987fa8e995ce637af1ec55530e8633e34717ecaf7131c810"} Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.646778 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.648547 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jlfc5" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.657219 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-tlh5b" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.699061 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-pgx7w container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.699130 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-pgx7w" podUID="92ada4c8-acb9-4740-85dc-815cd8a3b028" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.699556 5003 patch_prober.go:28] interesting pod/downloads-7954f5f757-pgx7w container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" start-of-body= Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.699579 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-pgx7w" podUID="92ada4c8-acb9-4740-85dc-815cd8a3b028" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.26:8080/\": dial tcp 10.217.0.26:8080: connect: connection refused" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.823131 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.824485 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.830579 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.830820 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.867598 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.918129 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-87zs8"] Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.937277 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"48cc6895-e764-4a66-a4ef-c89882ec8ee3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 04 11:50:33 crc kubenswrapper[5003]: I0104 11:50:33.937622 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"48cc6895-e764-4a66-a4ef-c89882ec8ee3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.039118 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"48cc6895-e764-4a66-a4ef-c89882ec8ee3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.039218 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"48cc6895-e764-4a66-a4ef-c89882ec8ee3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.039631 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"48cc6895-e764-4a66-a4ef-c89882ec8ee3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.083165 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"48cc6895-e764-4a66-a4ef-c89882ec8ee3\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.149584 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.167815 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.169178 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9t6r8"] Jan 04 11:50:34 crc kubenswrapper[5003]: W0104 11:50:34.187214 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod527e50de_5345_49f4_9ce8_3aaf8d446bed.slice/crio-23b86fb9f2d3d9f5dae54cce4db63cef614b0934a302861c64d45303ba9aea1a WatchSource:0}: Error finding container 23b86fb9f2d3d9f5dae54cce4db63cef614b0934a302861c64d45303ba9aea1a: Status 404 returned error can't find the container with id 23b86fb9f2d3d9f5dae54cce4db63cef614b0934a302861c64d45303ba9aea1a Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.257026 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1886cfcf-db6d-49b2-8f0a-4637996373db-secret-volume\") pod \"1886cfcf-db6d-49b2-8f0a-4637996373db\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.257121 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45x9h\" (UniqueName: \"kubernetes.io/projected/1886cfcf-db6d-49b2-8f0a-4637996373db-kube-api-access-45x9h\") pod \"1886cfcf-db6d-49b2-8f0a-4637996373db\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.257162 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1886cfcf-db6d-49b2-8f0a-4637996373db-config-volume\") pod \"1886cfcf-db6d-49b2-8f0a-4637996373db\" (UID: \"1886cfcf-db6d-49b2-8f0a-4637996373db\") " Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.258244 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1886cfcf-db6d-49b2-8f0a-4637996373db-config-volume" (OuterVolumeSpecName: "config-volume") pod "1886cfcf-db6d-49b2-8f0a-4637996373db" (UID: "1886cfcf-db6d-49b2-8f0a-4637996373db"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.266602 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1886cfcf-db6d-49b2-8f0a-4637996373db-kube-api-access-45x9h" (OuterVolumeSpecName: "kube-api-access-45x9h") pod "1886cfcf-db6d-49b2-8f0a-4637996373db" (UID: "1886cfcf-db6d-49b2-8f0a-4637996373db"). InnerVolumeSpecName "kube-api-access-45x9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.278196 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1886cfcf-db6d-49b2-8f0a-4637996373db-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1886cfcf-db6d-49b2-8f0a-4637996373db" (UID: "1886cfcf-db6d-49b2-8f0a-4637996373db"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.358537 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1886cfcf-db6d-49b2-8f0a-4637996373db-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.358587 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45x9h\" (UniqueName: \"kubernetes.io/projected/1886cfcf-db6d-49b2-8f0a-4637996373db-kube-api-access-45x9h\") on node \"crc\" DevicePath \"\"" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.358600 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1886cfcf-db6d-49b2-8f0a-4637996373db-config-volume\") on node \"crc\" DevicePath \"\"" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.475306 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-v47x4"] Jan 04 11:50:34 crc kubenswrapper[5003]: W0104 11:50:34.505200 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod223eef6f_9ab2_48c7_9d00_c812abc17e96.slice/crio-96232ddbbd732c95410a895784734fda8644cda9cf1d1884d0d19cc20d9877f3 WatchSource:0}: Error finding container 96232ddbbd732c95410a895784734fda8644cda9cf1d1884d0d19cc20d9877f3: Status 404 returned error can't find the container with id 96232ddbbd732c95410a895784734fda8644cda9cf1d1884d0d19cc20d9877f3 Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.524229 5003 patch_prober.go:28] interesting pod/router-default-5444994796-dkgwj container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 04 11:50:34 crc kubenswrapper[5003]: [-]has-synced failed: reason withheld Jan 04 11:50:34 crc kubenswrapper[5003]: [+]process-running ok Jan 04 11:50:34 crc kubenswrapper[5003]: healthz check failed Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.524276 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-dkgwj" podUID="87bcf788-c637-4ed6-a5da-650979a40203" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.684723 5003 generic.go:334] "Generic (PLEG): container finished" podID="527e50de-5345-49f4-9ce8-3aaf8d446bed" containerID="343acf68591845aee2c7ff0467a9f0e37b6ccbad1d67095330035604ef2ae6e1" exitCode=0 Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.685268 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t6r8" event={"ID":"527e50de-5345-49f4-9ce8-3aaf8d446bed","Type":"ContainerDied","Data":"343acf68591845aee2c7ff0467a9f0e37b6ccbad1d67095330035604ef2ae6e1"} Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.685307 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t6r8" event={"ID":"527e50de-5345-49f4-9ce8-3aaf8d446bed","Type":"ContainerStarted","Data":"23b86fb9f2d3d9f5dae54cce4db63cef614b0934a302861c64d45303ba9aea1a"} Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.720613 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v47x4" 
event={"ID":"223eef6f-9ab2-48c7-9d00-c812abc17e96","Type":"ContainerStarted","Data":"96232ddbbd732c95410a895784734fda8644cda9cf1d1884d0d19cc20d9877f3"} Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.735621 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"c22ca48f846a44f04cfb9514eec929564dd6b34c4226dfde01d3b7dc27a58f34"} Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.756200 5003 generic.go:334] "Generic (PLEG): container finished" podID="c02a9169-294b-4986-823b-e77965bc257d" containerID="abd2cb511cd1288aaed0bddb31acd3b89ebb338f60ccfb59cf8e508d26742104" exitCode=0 Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.756274 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-87zs8" event={"ID":"c02a9169-294b-4986-823b-e77965bc257d","Type":"ContainerDied","Data":"abd2cb511cd1288aaed0bddb31acd3b89ebb338f60ccfb59cf8e508d26742104"} Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.756300 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-87zs8" event={"ID":"c02a9169-294b-4986-823b-e77965bc257d","Type":"ContainerStarted","Data":"d7da27be4b3233acf2b56bee12cfd53081a950c3cd2b9e050bcac0a19d467e5c"} Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.776325 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.777255 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v" event={"ID":"1886cfcf-db6d-49b2-8f0a-4637996373db","Type":"ContainerDied","Data":"3c000948d035b41adac65c55c144463550140d98bc28c8edadd0f15491cbd8ca"} Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.777310 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c000948d035b41adac65c55c144463550140d98bc28c8edadd0f15491cbd8ca" Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.799276 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"20b2355277fb0235b24b1627f9f001b82d216528a7e454e1186669dbfe54d056"} Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.829086 5003 generic.go:334] "Generic (PLEG): container finished" podID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" containerID="14a3ba3e10485d554f246879628bbabccad7fd997873ad7b062cd3bcebf49f79" exitCode=0 Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.879372 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ghcn" event={"ID":"cf4075fd-a261-4d15-b6e3-02d8c346fe74","Type":"ContainerDied","Data":"14a3ba3e10485d554f246879628bbabccad7fd997873ad7b062cd3bcebf49f79"} Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.879430 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"e751fe7109d37087a55472ddb6f89a2c8bfed2f87c6962734665bcefdbe97bf0"} Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.879484 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 04 11:50:34 crc kubenswrapper[5003]: I0104 11:50:34.898152 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:50:35 crc kubenswrapper[5003]: I0104 11:50:35.499953 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:35 crc kubenswrapper[5003]: I0104 11:50:35.502787 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-dkgwj" Jan 04 11:50:35 crc kubenswrapper[5003]: I0104 11:50:35.887779 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"48cc6895-e764-4a66-a4ef-c89882ec8ee3","Type":"ContainerStarted","Data":"76e7e119655c12664e0b052db99c921347b96edab6e375286f8b6f0cbc4b98f8"} Jan 04 11:50:35 crc kubenswrapper[5003]: I0104 11:50:35.888745 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"48cc6895-e764-4a66-a4ef-c89882ec8ee3","Type":"ContainerStarted","Data":"4d4b04d3d1590965b73e4f581b0a39e7a5f6fcbc3583c5e9a309fd4093ef98d4"} Jan 04 11:50:35 crc kubenswrapper[5003]: I0104 11:50:35.893654 5003 generic.go:334] "Generic (PLEG): container finished" podID="223eef6f-9ab2-48c7-9d00-c812abc17e96" containerID="8dd70cb904827eb33a174c58d53a3d48b3470c6ad2d868cf787986b391762893" exitCode=0 Jan 04 11:50:35 crc kubenswrapper[5003]: I0104 11:50:35.893711 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v47x4" event={"ID":"223eef6f-9ab2-48c7-9d00-c812abc17e96","Type":"ContainerDied","Data":"8dd70cb904827eb33a174c58d53a3d48b3470c6ad2d868cf787986b391762893"} Jan 04 11:50:35 crc kubenswrapper[5003]: I0104 11:50:35.916396 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.916372273 podStartE2EDuration="2.916372273s" podCreationTimestamp="2026-01-04 11:50:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:50:35.910479845 +0000 UTC m=+151.383509686" watchObservedRunningTime="2026-01-04 11:50:35.916372273 +0000 UTC m=+151.389402114" Jan 04 11:50:36 crc kubenswrapper[5003]: E0104 11:50:36.219539 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pod48cc6895_e764_4a66_a4ef_c89882ec8ee3.slice/crio-76e7e119655c12664e0b052db99c921347b96edab6e375286f8b6f0cbc4b98f8.scope\": RecentStats: unable to find data in memory cache]" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.462486 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 04 11:50:36 crc kubenswrapper[5003]: E0104 11:50:36.463288 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1886cfcf-db6d-49b2-8f0a-4637996373db" containerName="collect-profiles" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.463305 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="1886cfcf-db6d-49b2-8f0a-4637996373db" containerName="collect-profiles" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.474621 5003 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="1886cfcf-db6d-49b2-8f0a-4637996373db" containerName="collect-profiles" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.475615 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.480141 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.480458 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.500872 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.544056 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f6039ef8-db1d-438c-86de-4e36a59a2d63-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f6039ef8-db1d-438c-86de-4e36a59a2d63\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.544144 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f6039ef8-db1d-438c-86de-4e36a59a2d63-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f6039ef8-db1d-438c-86de-4e36a59a2d63\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.646032 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f6039ef8-db1d-438c-86de-4e36a59a2d63-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f6039ef8-db1d-438c-86de-4e36a59a2d63\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.646164 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f6039ef8-db1d-438c-86de-4e36a59a2d63-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f6039ef8-db1d-438c-86de-4e36a59a2d63\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.646434 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f6039ef8-db1d-438c-86de-4e36a59a2d63-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f6039ef8-db1d-438c-86de-4e36a59a2d63\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.706767 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f6039ef8-db1d-438c-86de-4e36a59a2d63-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f6039ef8-db1d-438c-86de-4e36a59a2d63\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.797398 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.908962 5003 generic.go:334] "Generic (PLEG): container finished" podID="48cc6895-e764-4a66-a4ef-c89882ec8ee3" containerID="76e7e119655c12664e0b052db99c921347b96edab6e375286f8b6f0cbc4b98f8" exitCode=0 Jan 04 11:50:36 crc kubenswrapper[5003]: I0104 11:50:36.909031 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"48cc6895-e764-4a66-a4ef-c89882ec8ee3","Type":"ContainerDied","Data":"76e7e119655c12664e0b052db99c921347b96edab6e375286f8b6f0cbc4b98f8"} Jan 04 11:50:37 crc kubenswrapper[5003]: I0104 11:50:37.224211 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 04 11:50:37 crc kubenswrapper[5003]: W0104 11:50:37.355038 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf6039ef8_db1d_438c_86de_4e36a59a2d63.slice/crio-cf0af99b3957fd837fd143c581c4d827294107d82a7e4a106553ef4b9e55a333 WatchSource:0}: Error finding container cf0af99b3957fd837fd143c581c4d827294107d82a7e4a106553ef4b9e55a333: Status 404 returned error can't find the container with id cf0af99b3957fd837fd143c581c4d827294107d82a7e4a106553ef4b9e55a333 Jan 04 11:50:37 crc kubenswrapper[5003]: I0104 11:50:37.926059 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-665b6dd947-2g644_dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce/cluster-samples-operator/0.log" Jan 04 11:50:37 crc kubenswrapper[5003]: I0104 11:50:37.926521 5003 generic.go:334] "Generic (PLEG): container finished" podID="dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce" containerID="98229ec3fb7172ba923a188cc912bbf3d4be7cd0c43e4218d6c027c954a25e1c" exitCode=2 Jan 04 11:50:37 crc kubenswrapper[5003]: I0104 11:50:37.926632 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" event={"ID":"dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce","Type":"ContainerDied","Data":"98229ec3fb7172ba923a188cc912bbf3d4be7cd0c43e4218d6c027c954a25e1c"} Jan 04 11:50:37 crc kubenswrapper[5003]: I0104 11:50:37.927353 5003 scope.go:117] "RemoveContainer" containerID="98229ec3fb7172ba923a188cc912bbf3d4be7cd0c43e4218d6c027c954a25e1c" Jan 04 11:50:37 crc kubenswrapper[5003]: I0104 11:50:37.932074 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f6039ef8-db1d-438c-86de-4e36a59a2d63","Type":"ContainerStarted","Data":"cf0af99b3957fd837fd143c581c4d827294107d82a7e4a106553ef4b9e55a333"} Jan 04 11:50:38 crc kubenswrapper[5003]: I0104 11:50:38.257845 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 04 11:50:38 crc kubenswrapper[5003]: I0104 11:50:38.288754 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kube-api-access\") pod \"48cc6895-e764-4a66-a4ef-c89882ec8ee3\" (UID: \"48cc6895-e764-4a66-a4ef-c89882ec8ee3\") " Jan 04 11:50:38 crc kubenswrapper[5003]: I0104 11:50:38.288861 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kubelet-dir\") pod \"48cc6895-e764-4a66-a4ef-c89882ec8ee3\" (UID: \"48cc6895-e764-4a66-a4ef-c89882ec8ee3\") " Jan 04 11:50:38 crc kubenswrapper[5003]: I0104 11:50:38.289239 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "48cc6895-e764-4a66-a4ef-c89882ec8ee3" (UID: "48cc6895-e764-4a66-a4ef-c89882ec8ee3"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:50:38 crc kubenswrapper[5003]: I0104 11:50:38.311420 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "48cc6895-e764-4a66-a4ef-c89882ec8ee3" (UID: "48cc6895-e764-4a66-a4ef-c89882ec8ee3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:50:38 crc kubenswrapper[5003]: I0104 11:50:38.392278 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 04 11:50:38 crc kubenswrapper[5003]: I0104 11:50:38.392309 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/48cc6895-e764-4a66-a4ef-c89882ec8ee3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 04 11:50:38 crc kubenswrapper[5003]: I0104 11:50:38.985854 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f6039ef8-db1d-438c-86de-4e36a59a2d63","Type":"ContainerStarted","Data":"b23b9ef72b9d23b9efd469ddfafda641db04a877db57fe8b3bc57b5f46d6d5b6"} Jan 04 11:50:39 crc kubenswrapper[5003]: I0104 11:50:39.000702 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"48cc6895-e764-4a66-a4ef-c89882ec8ee3","Type":"ContainerDied","Data":"4d4b04d3d1590965b73e4f581b0a39e7a5f6fcbc3583c5e9a309fd4093ef98d4"} Jan 04 11:50:39 crc kubenswrapper[5003]: I0104 11:50:39.000780 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d4b04d3d1590965b73e4f581b0a39e7a5f6fcbc3583c5e9a309fd4093ef98d4" Jan 04 11:50:39 crc kubenswrapper[5003]: I0104 11:50:39.000909 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 04 11:50:39 crc kubenswrapper[5003]: I0104 11:50:39.021602 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-cluster-samples-operator_cluster-samples-operator-665b6dd947-2g644_dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce/cluster-samples-operator/0.log" Jan 04 11:50:39 crc kubenswrapper[5003]: I0104 11:50:39.022112 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-2g644" event={"ID":"dfb130da-bcbf-46d8-8e7e-2b1f17fe71ce","Type":"ContainerStarted","Data":"d762958e89c94f4773f31b0948514a87212dfad15e99c09dd75ca27219493510"} Jan 04 11:50:39 crc kubenswrapper[5003]: I0104 11:50:39.362090 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-87wfq" Jan 04 11:50:39 crc kubenswrapper[5003]: I0104 11:50:39.419280 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 11:50:39 crc kubenswrapper[5003]: I0104 11:50:39.419372 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:50:40 crc kubenswrapper[5003]: I0104 11:50:40.049530 5003 generic.go:334] "Generic (PLEG): container finished" podID="f6039ef8-db1d-438c-86de-4e36a59a2d63" containerID="b23b9ef72b9d23b9efd469ddfafda641db04a877db57fe8b3bc57b5f46d6d5b6" exitCode=0 Jan 04 11:50:40 crc kubenswrapper[5003]: I0104 11:50:40.049582 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f6039ef8-db1d-438c-86de-4e36a59a2d63","Type":"ContainerDied","Data":"b23b9ef72b9d23b9efd469ddfafda641db04a877db57fe8b3bc57b5f46d6d5b6"} Jan 04 11:50:40 crc kubenswrapper[5003]: I0104 11:50:40.484140 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 04 11:50:40 crc kubenswrapper[5003]: I0104 11:50:40.526635 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f6039ef8-db1d-438c-86de-4e36a59a2d63-kube-api-access\") pod \"f6039ef8-db1d-438c-86de-4e36a59a2d63\" (UID: \"f6039ef8-db1d-438c-86de-4e36a59a2d63\") " Jan 04 11:50:40 crc kubenswrapper[5003]: I0104 11:50:40.526747 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f6039ef8-db1d-438c-86de-4e36a59a2d63-kubelet-dir\") pod \"f6039ef8-db1d-438c-86de-4e36a59a2d63\" (UID: \"f6039ef8-db1d-438c-86de-4e36a59a2d63\") " Jan 04 11:50:40 crc kubenswrapper[5003]: I0104 11:50:40.526826 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f6039ef8-db1d-438c-86de-4e36a59a2d63-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f6039ef8-db1d-438c-86de-4e36a59a2d63" (UID: "f6039ef8-db1d-438c-86de-4e36a59a2d63"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:50:40 crc kubenswrapper[5003]: I0104 11:50:40.527128 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f6039ef8-db1d-438c-86de-4e36a59a2d63-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 04 11:50:40 crc kubenswrapper[5003]: I0104 11:50:40.533410 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6039ef8-db1d-438c-86de-4e36a59a2d63-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f6039ef8-db1d-438c-86de-4e36a59a2d63" (UID: "f6039ef8-db1d-438c-86de-4e36a59a2d63"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:50:40 crc kubenswrapper[5003]: I0104 11:50:40.628033 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f6039ef8-db1d-438c-86de-4e36a59a2d63-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 04 11:50:41 crc kubenswrapper[5003]: I0104 11:50:41.062000 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f6039ef8-db1d-438c-86de-4e36a59a2d63","Type":"ContainerDied","Data":"cf0af99b3957fd837fd143c581c4d827294107d82a7e4a106553ef4b9e55a333"} Jan 04 11:50:41 crc kubenswrapper[5003]: I0104 11:50:41.062074 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf0af99b3957fd837fd143c581c4d827294107d82a7e4a106553ef4b9e55a333" Jan 04 11:50:41 crc kubenswrapper[5003]: I0104 11:50:41.062075 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 04 11:50:43 crc kubenswrapper[5003]: I0104 11:50:43.344872 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:43 crc kubenswrapper[5003]: I0104 11:50:43.350878 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-2jt99" Jan 04 11:50:43 crc kubenswrapper[5003]: I0104 11:50:43.704078 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-pgx7w" Jan 04 11:50:50 crc kubenswrapper[5003]: I0104 11:50:50.803294 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:50 crc kubenswrapper[5003]: I0104 11:50:50.810579 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cd6de6ec-2a7c-4842-9d8a-ba4032acb50e-metrics-certs\") pod \"network-metrics-daemon-n2zwh\" (UID: \"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e\") " pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:50 crc kubenswrapper[5003]: I0104 11:50:50.830822 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2zwh" Jan 04 11:50:51 crc kubenswrapper[5003]: I0104 11:50:51.230703 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:51:03 crc kubenswrapper[5003]: I0104 11:51:03.632230 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-qgwds" Jan 04 11:51:04 crc kubenswrapper[5003]: I0104 11:51:04.291304 5003 patch_prober.go:28] interesting pod/console-operator-58897d9998-cw9t7 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/readyz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 04 11:51:04 crc kubenswrapper[5003]: I0104 11:51:04.291721 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-cw9t7" podUID="831746c3-7123-4189-a551-7f7852402807" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/readyz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 04 11:51:09 crc kubenswrapper[5003]: I0104 11:51:09.418846 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 11:51:09 crc kubenswrapper[5003]: I0104 11:51:09.420147 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:51:10 crc kubenswrapper[5003]: I0104 11:51:10.852201 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 04 11:51:10 crc kubenswrapper[5003]: E0104 11:51:10.852733 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6039ef8-db1d-438c-86de-4e36a59a2d63" containerName="pruner" Jan 04 11:51:10 crc kubenswrapper[5003]: I0104 11:51:10.852766 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6039ef8-db1d-438c-86de-4e36a59a2d63" containerName="pruner" Jan 04 11:51:10 crc kubenswrapper[5003]: E0104 11:51:10.852814 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48cc6895-e764-4a66-a4ef-c89882ec8ee3" containerName="pruner" Jan 04 11:51:10 crc kubenswrapper[5003]: I0104 11:51:10.852831 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="48cc6895-e764-4a66-a4ef-c89882ec8ee3" containerName="pruner" Jan 04 11:51:10 crc kubenswrapper[5003]: I0104 11:51:10.853110 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="48cc6895-e764-4a66-a4ef-c89882ec8ee3" containerName="pruner" Jan 04 11:51:10 crc kubenswrapper[5003]: I0104 11:51:10.853148 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6039ef8-db1d-438c-86de-4e36a59a2d63" containerName="pruner" Jan 04 11:51:10 crc kubenswrapper[5003]: I0104 11:51:10.854112 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 04 11:51:10 crc kubenswrapper[5003]: I0104 11:51:10.858408 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 04 11:51:10 crc kubenswrapper[5003]: I0104 11:51:10.858761 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 04 11:51:10 crc kubenswrapper[5003]: I0104 11:51:10.882207 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 04 11:51:10 crc kubenswrapper[5003]: E0104 11:51:10.980470 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 04 11:51:10 crc kubenswrapper[5003]: E0104 11:51:10.980820 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6kjrn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-9t6r8_openshift-marketplace(527e50de-5345-49f4-9ce8-3aaf8d446bed): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:10 crc kubenswrapper[5003]: E0104 11:51:10.982096 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-9t6r8" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" Jan 04 11:51:11 crc kubenswrapper[5003]: I0104 11:51:11.019558 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: 
\"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 04 11:51:11 crc kubenswrapper[5003]: I0104 11:51:11.019641 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 04 11:51:11 crc kubenswrapper[5003]: I0104 11:51:11.120732 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 04 11:51:11 crc kubenswrapper[5003]: I0104 11:51:11.120800 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 04 11:51:11 crc kubenswrapper[5003]: I0104 11:51:11.120841 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 04 11:51:11 crc kubenswrapper[5003]: I0104 11:51:11.154617 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 04 11:51:11 crc kubenswrapper[5003]: I0104 11:51:11.197737 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 04 11:51:12 crc kubenswrapper[5003]: I0104 11:51:12.045465 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 04 11:51:12 crc kubenswrapper[5003]: E0104 11:51:12.329670 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-9t6r8" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" Jan 04 11:51:12 crc kubenswrapper[5003]: E0104 11:51:12.393910 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 04 11:51:12 crc kubenswrapper[5003]: E0104 11:51:12.394099 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qcnfb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-984zr_openshift-marketplace(43dd1701-8159-424a-b98b-c3dd5f2b9ad8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:12 crc kubenswrapper[5003]: E0104 11:51:12.395625 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-984zr" podUID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" Jan 04 11:51:14 crc kubenswrapper[5003]: E0104 11:51:14.095194 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-984zr" podUID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" Jan 04 11:51:14 crc kubenswrapper[5003]: E0104 11:51:14.169219 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 04 11:51:14 crc kubenswrapper[5003]: E0104 11:51:14.169434 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-htccq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-mxr5x_openshift-marketplace(b4141980-5ab0-4976-81aa-80a2245ae245): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:14 crc kubenswrapper[5003]: E0104 11:51:14.170689 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-mxr5x" podUID="b4141980-5ab0-4976-81aa-80a2245ae245" Jan 04 11:51:14 crc kubenswrapper[5003]: E0104 11:51:14.181999 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 04 11:51:14 crc kubenswrapper[5003]: E0104 11:51:14.182183 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c227h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-hsgvz_openshift-marketplace(7aedb7cd-d9d3-441b-af72-a11878071f3f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:14 crc kubenswrapper[5003]: E0104 11:51:14.183471 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-hsgvz" podUID="7aedb7cd-d9d3-441b-af72-a11878071f3f" Jan 04 11:51:14 crc kubenswrapper[5003]: E0104 11:51:14.238448 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 04 11:51:14 crc kubenswrapper[5003]: E0104 11:51:14.238668 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-565jz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-v47x4_openshift-marketplace(223eef6f-9ab2-48c7-9d00-c812abc17e96): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:14 crc kubenswrapper[5003]: E0104 11:51:14.240047 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-v47x4" podUID="223eef6f-9ab2-48c7-9d00-c812abc17e96" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.410914 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-v47x4" podUID="223eef6f-9ab2-48c7-9d00-c812abc17e96" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.410961 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-hsgvz" podUID="7aedb7cd-d9d3-441b-af72-a11878071f3f" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.411097 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-mxr5x" podUID="b4141980-5ab0-4976-81aa-80a2245ae245" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.509619 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.510183 5003 kuberuntime_manager.go:1274] "Unhandled Error" 
err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xbgtq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-7ghcn_openshift-marketplace(cf4075fd-a261-4d15-b6e3-02d8c346fe74): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.511442 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-7ghcn" podUID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.661106 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.661618 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qv7d7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-87zs8_openshift-marketplace(c02a9169-294b-4986-823b-e77965bc257d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.662872 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-87zs8" podUID="c02a9169-294b-4986-823b-e77965bc257d" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.705390 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.705569 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4d975,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-gjbcx_openshift-marketplace(6c5933bf-668f-4062-998b-3b2c5ad3a811): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 04 11:51:15 crc kubenswrapper[5003]: E0104 11:51:15.706742 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-gjbcx" podUID="6c5933bf-668f-4062-998b-3b2c5ad3a811" Jan 04 11:51:15 crc kubenswrapper[5003]: W0104 11:51:15.821241 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd6de6ec_2a7c_4842_9d8a_ba4032acb50e.slice/crio-a5fcb73c1c50d541fd74607de20c9ead8f777d2b522fb632a179d9234c6cbcb2 WatchSource:0}: Error finding container a5fcb73c1c50d541fd74607de20c9ead8f777d2b522fb632a179d9234c6cbcb2: Status 404 returned error can't find the container with id a5fcb73c1c50d541fd74607de20c9ead8f777d2b522fb632a179d9234c6cbcb2 Jan 04 11:51:15 crc kubenswrapper[5003]: I0104 11:51:15.822862 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-n2zwh"] Jan 04 11:51:15 crc kubenswrapper[5003]: I0104 11:51:15.887992 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.295277 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" event={"ID":"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e","Type":"ContainerStarted","Data":"a5fcb73c1c50d541fd74607de20c9ead8f777d2b522fb632a179d9234c6cbcb2"} Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.296155 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77","Type":"ContainerStarted","Data":"a71f5bbdb5dd2990035f6b543892c775151107968f848c36c9aa4bb12b285142"} Jan 04 11:51:16 crc kubenswrapper[5003]: E0104 
11:51:16.298066 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-87zs8" podUID="c02a9169-294b-4986-823b-e77965bc257d" Jan 04 11:51:16 crc kubenswrapper[5003]: E0104 11:51:16.298321 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-gjbcx" podUID="6c5933bf-668f-4062-998b-3b2c5ad3a811" Jan 04 11:51:16 crc kubenswrapper[5003]: E0104 11:51:16.298492 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-7ghcn" podUID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.451147 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.452424 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.454677 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.602109 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-var-lock\") pod \"installer-9-crc\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.602249 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kube-api-access\") pod \"installer-9-crc\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.602292 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kubelet-dir\") pod \"installer-9-crc\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.703529 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kube-api-access\") pod \"installer-9-crc\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.703575 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kubelet-dir\") pod \"installer-9-crc\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") " 
pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.703641 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-var-lock\") pod \"installer-9-crc\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.703729 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-var-lock\") pod \"installer-9-crc\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.703824 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kubelet-dir\") pod \"installer-9-crc\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.732635 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kube-api-access\") pod \"installer-9-crc\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:16 crc kubenswrapper[5003]: I0104 11:51:16.817031 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 04 11:51:17 crc kubenswrapper[5003]: I0104 11:51:17.242178 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 04 11:51:17 crc kubenswrapper[5003]: W0104 11:51:17.265848 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poda9e44ab2_84f1_4907_916e_6c04d89cc1bb.slice/crio-bba97a294c9299684d70412ceab603c9ba6932f0edbcd8f7d97d1ea2a7e99305 WatchSource:0}: Error finding container bba97a294c9299684d70412ceab603c9ba6932f0edbcd8f7d97d1ea2a7e99305: Status 404 returned error can't find the container with id bba97a294c9299684d70412ceab603c9ba6932f0edbcd8f7d97d1ea2a7e99305 Jan 04 11:51:17 crc kubenswrapper[5003]: I0104 11:51:17.304579 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a9e44ab2-84f1-4907-916e-6c04d89cc1bb","Type":"ContainerStarted","Data":"bba97a294c9299684d70412ceab603c9ba6932f0edbcd8f7d97d1ea2a7e99305"} Jan 04 11:51:22 crc kubenswrapper[5003]: I0104 11:51:22.348872 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a9e44ab2-84f1-4907-916e-6c04d89cc1bb","Type":"ContainerStarted","Data":"f98bdb7ea48f6ecf6e09abf79f23be0bb216d871a971af34027a0c43fb6d502a"} Jan 04 11:51:22 crc kubenswrapper[5003]: I0104 11:51:22.353581 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" event={"ID":"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e","Type":"ContainerStarted","Data":"6df04ad6afac13dd4f21d6a944314cb5984c51f1f30ca4fc1c87a010f4e19929"} Jan 04 11:51:22 crc kubenswrapper[5003]: I0104 11:51:22.353661 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-n2zwh" 
event={"ID":"cd6de6ec-2a7c-4842-9d8a-ba4032acb50e","Type":"ContainerStarted","Data":"e200ad3bb7a2210247a388cd297b47ea422f8472562e1bc4e648a6c324b4f341"} Jan 04 11:51:22 crc kubenswrapper[5003]: I0104 11:51:22.356754 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77","Type":"ContainerStarted","Data":"940d410b22ad6429211ab641f4d692730495562ad4b554beb67a4a6444e1209f"} Jan 04 11:51:22 crc kubenswrapper[5003]: I0104 11:51:22.369796 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=6.36977005 podStartE2EDuration="6.36977005s" podCreationTimestamp="2026-01-04 11:51:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:51:22.365529456 +0000 UTC m=+197.838559297" watchObservedRunningTime="2026-01-04 11:51:22.36977005 +0000 UTC m=+197.842799901" Jan 04 11:51:22 crc kubenswrapper[5003]: I0104 11:51:22.393668 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=12.393644898 podStartE2EDuration="12.393644898s" podCreationTimestamp="2026-01-04 11:51:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:51:22.39072536 +0000 UTC m=+197.863755201" watchObservedRunningTime="2026-01-04 11:51:22.393644898 +0000 UTC m=+197.866674739" Jan 04 11:51:22 crc kubenswrapper[5003]: I0104 11:51:22.410814 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-n2zwh" podStartSLOduration=174.410794107 podStartE2EDuration="2m54.410794107s" podCreationTimestamp="2026-01-04 11:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:51:22.409254186 +0000 UTC m=+197.882284027" watchObservedRunningTime="2026-01-04 11:51:22.410794107 +0000 UTC m=+197.883823948" Jan 04 11:51:23 crc kubenswrapper[5003]: I0104 11:51:23.367277 5003 generic.go:334] "Generic (PLEG): container finished" podID="9e8da04b-cd02-4b5c-9dd8-28fd1225cb77" containerID="940d410b22ad6429211ab641f4d692730495562ad4b554beb67a4a6444e1209f" exitCode=0 Jan 04 11:51:23 crc kubenswrapper[5003]: I0104 11:51:23.367358 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77","Type":"ContainerDied","Data":"940d410b22ad6429211ab641f4d692730495562ad4b554beb67a4a6444e1209f"} Jan 04 11:51:24 crc kubenswrapper[5003]: I0104 11:51:24.595327 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 04 11:51:24 crc kubenswrapper[5003]: I0104 11:51:24.728316 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kube-api-access\") pod \"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77\" (UID: \"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77\") " Jan 04 11:51:24 crc kubenswrapper[5003]: I0104 11:51:24.728769 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kubelet-dir\") pod \"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77\" (UID: \"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77\") " Jan 04 11:51:24 crc kubenswrapper[5003]: I0104 11:51:24.728920 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9e8da04b-cd02-4b5c-9dd8-28fd1225cb77" (UID: "9e8da04b-cd02-4b5c-9dd8-28fd1225cb77"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:51:24 crc kubenswrapper[5003]: I0104 11:51:24.729193 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:24 crc kubenswrapper[5003]: I0104 11:51:24.737776 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9e8da04b-cd02-4b5c-9dd8-28fd1225cb77" (UID: "9e8da04b-cd02-4b5c-9dd8-28fd1225cb77"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:51:24 crc kubenswrapper[5003]: I0104 11:51:24.830383 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e8da04b-cd02-4b5c-9dd8-28fd1225cb77-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:25 crc kubenswrapper[5003]: I0104 11:51:25.387491 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"9e8da04b-cd02-4b5c-9dd8-28fd1225cb77","Type":"ContainerDied","Data":"a71f5bbdb5dd2990035f6b543892c775151107968f848c36c9aa4bb12b285142"} Jan 04 11:51:25 crc kubenswrapper[5003]: I0104 11:51:25.387541 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a71f5bbdb5dd2990035f6b543892c775151107968f848c36c9aa4bb12b285142" Jan 04 11:51:25 crc kubenswrapper[5003]: I0104 11:51:25.388379 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 04 11:51:30 crc kubenswrapper[5003]: I0104 11:51:30.417737 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t6r8" event={"ID":"527e50de-5345-49f4-9ce8-3aaf8d446bed","Type":"ContainerStarted","Data":"ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481"} Jan 04 11:51:31 crc kubenswrapper[5003]: I0104 11:51:31.424917 5003 generic.go:334] "Generic (PLEG): container finished" podID="527e50de-5345-49f4-9ce8-3aaf8d446bed" containerID="ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481" exitCode=0 Jan 04 11:51:31 crc kubenswrapper[5003]: I0104 11:51:31.424991 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t6r8" event={"ID":"527e50de-5345-49f4-9ce8-3aaf8d446bed","Type":"ContainerDied","Data":"ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481"} Jan 04 11:51:32 crc kubenswrapper[5003]: I0104 11:51:32.436315 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v47x4" event={"ID":"223eef6f-9ab2-48c7-9d00-c812abc17e96","Type":"ContainerStarted","Data":"5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e"} Jan 04 11:51:32 crc kubenswrapper[5003]: I0104 11:51:32.438525 5003 generic.go:334] "Generic (PLEG): container finished" podID="c02a9169-294b-4986-823b-e77965bc257d" containerID="b54ab509f4f644268492504991b65d45c35fba4b4e6950ec5b9825139112dfd2" exitCode=0 Jan 04 11:51:32 crc kubenswrapper[5003]: I0104 11:51:32.438521 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-87zs8" event={"ID":"c02a9169-294b-4986-823b-e77965bc257d","Type":"ContainerDied","Data":"b54ab509f4f644268492504991b65d45c35fba4b4e6950ec5b9825139112dfd2"} Jan 04 11:51:32 crc kubenswrapper[5003]: I0104 11:51:32.440609 5003 generic.go:334] "Generic (PLEG): container finished" podID="7aedb7cd-d9d3-441b-af72-a11878071f3f" containerID="118495972f58a8328051b34e1ced586241f3c598cb5d52e7ce8205cfb074b178" exitCode=0 Jan 04 11:51:32 crc kubenswrapper[5003]: I0104 11:51:32.440665 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsgvz" event={"ID":"7aedb7cd-d9d3-441b-af72-a11878071f3f","Type":"ContainerDied","Data":"118495972f58a8328051b34e1ced586241f3c598cb5d52e7ce8205cfb074b178"} Jan 04 11:51:32 crc kubenswrapper[5003]: I0104 11:51:32.444270 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjbcx" event={"ID":"6c5933bf-668f-4062-998b-3b2c5ad3a811","Type":"ContainerStarted","Data":"39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1"} Jan 04 11:51:32 crc kubenswrapper[5003]: I0104 11:51:32.448776 5003 generic.go:334] "Generic (PLEG): container finished" podID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" containerID="642c66d299ea148c1cc398835b7b28a3e0159bb76c69ad5fe08c3d5e75f02081" exitCode=0 Jan 04 11:51:32 crc kubenswrapper[5003]: I0104 11:51:32.448873 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-984zr" event={"ID":"43dd1701-8159-424a-b98b-c3dd5f2b9ad8","Type":"ContainerDied","Data":"642c66d299ea148c1cc398835b7b28a3e0159bb76c69ad5fe08c3d5e75f02081"} Jan 04 11:51:32 crc kubenswrapper[5003]: I0104 11:51:32.452235 5003 generic.go:334] "Generic (PLEG): container finished" podID="b4141980-5ab0-4976-81aa-80a2245ae245" 
containerID="f720f9f7d17b7226d0a9dfa47cb6f463f46360a512ccf72b364517ddd81ad339" exitCode=0 Jan 04 11:51:32 crc kubenswrapper[5003]: I0104 11:51:32.452277 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxr5x" event={"ID":"b4141980-5ab0-4976-81aa-80a2245ae245","Type":"ContainerDied","Data":"f720f9f7d17b7226d0a9dfa47cb6f463f46360a512ccf72b364517ddd81ad339"} Jan 04 11:51:33 crc kubenswrapper[5003]: I0104 11:51:33.460324 5003 generic.go:334] "Generic (PLEG): container finished" podID="6c5933bf-668f-4062-998b-3b2c5ad3a811" containerID="39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1" exitCode=0 Jan 04 11:51:33 crc kubenswrapper[5003]: I0104 11:51:33.460397 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjbcx" event={"ID":"6c5933bf-668f-4062-998b-3b2c5ad3a811","Type":"ContainerDied","Data":"39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1"} Jan 04 11:51:33 crc kubenswrapper[5003]: I0104 11:51:33.463609 5003 generic.go:334] "Generic (PLEG): container finished" podID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" containerID="257c82e4a8eb9d14ae6b3c3f1d6c8aeee276988b29757efe5bc524267cf47892" exitCode=0 Jan 04 11:51:33 crc kubenswrapper[5003]: I0104 11:51:33.463676 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ghcn" event={"ID":"cf4075fd-a261-4d15-b6e3-02d8c346fe74","Type":"ContainerDied","Data":"257c82e4a8eb9d14ae6b3c3f1d6c8aeee276988b29757efe5bc524267cf47892"} Jan 04 11:51:33 crc kubenswrapper[5003]: I0104 11:51:33.465952 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t6r8" event={"ID":"527e50de-5345-49f4-9ce8-3aaf8d446bed","Type":"ContainerStarted","Data":"240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3"} Jan 04 11:51:33 crc kubenswrapper[5003]: I0104 11:51:33.467752 5003 generic.go:334] "Generic (PLEG): container finished" podID="223eef6f-9ab2-48c7-9d00-c812abc17e96" containerID="5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e" exitCode=0 Jan 04 11:51:33 crc kubenswrapper[5003]: I0104 11:51:33.467777 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v47x4" event={"ID":"223eef6f-9ab2-48c7-9d00-c812abc17e96","Type":"ContainerDied","Data":"5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e"} Jan 04 11:51:33 crc kubenswrapper[5003]: I0104 11:51:33.532792 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9t6r8" podStartSLOduration=3.780166946 podStartE2EDuration="1m1.532773446s" podCreationTimestamp="2026-01-04 11:50:32 +0000 UTC" firstStartedPulling="2026-01-04 11:50:34.720387161 +0000 UTC m=+150.193416992" lastFinishedPulling="2026-01-04 11:51:32.472993651 +0000 UTC m=+207.946023492" observedRunningTime="2026-01-04 11:51:33.512929165 +0000 UTC m=+208.985959026" watchObservedRunningTime="2026-01-04 11:51:33.532773446 +0000 UTC m=+209.005803287" Jan 04 11:51:33 crc kubenswrapper[5003]: I0104 11:51:33.586197 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:51:33 crc kubenswrapper[5003]: I0104 11:51:33.586274 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:51:34 crc kubenswrapper[5003]: I0104 11:51:34.474765 5003 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-87zs8" event={"ID":"c02a9169-294b-4986-823b-e77965bc257d","Type":"ContainerStarted","Data":"ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc"} Jan 04 11:51:34 crc kubenswrapper[5003]: I0104 11:51:34.740779 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9t6r8" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" containerName="registry-server" probeResult="failure" output=< Jan 04 11:51:34 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s Jan 04 11:51:34 crc kubenswrapper[5003]: > Jan 04 11:51:35 crc kubenswrapper[5003]: I0104 11:51:35.508407 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-87zs8" podStartSLOduration=5.430963013 podStartE2EDuration="1m3.508377278s" podCreationTimestamp="2026-01-04 11:50:32 +0000 UTC" firstStartedPulling="2026-01-04 11:50:34.771575428 +0000 UTC m=+150.244605269" lastFinishedPulling="2026-01-04 11:51:32.848989693 +0000 UTC m=+208.322019534" observedRunningTime="2026-01-04 11:51:35.507595497 +0000 UTC m=+210.980625338" watchObservedRunningTime="2026-01-04 11:51:35.508377278 +0000 UTC m=+210.981407159" Jan 04 11:51:38 crc kubenswrapper[5003]: I0104 11:51:38.504140 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsgvz" event={"ID":"7aedb7cd-d9d3-441b-af72-a11878071f3f","Type":"ContainerStarted","Data":"8c60d451778edc3179603f57392557166aa6af054ea34f5475f2c5ebf3d33fc2"} Jan 04 11:51:39 crc kubenswrapper[5003]: I0104 11:51:39.419190 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 11:51:39 crc kubenswrapper[5003]: I0104 11:51:39.419273 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:51:39 crc kubenswrapper[5003]: I0104 11:51:39.419338 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:51:39 crc kubenswrapper[5003]: I0104 11:51:39.419995 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 11:51:39 crc kubenswrapper[5003]: I0104 11:51:39.420134 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039" gracePeriod=600 Jan 04 11:51:40 crc kubenswrapper[5003]: I0104 11:51:40.447298 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:51:40 crc kubenswrapper[5003]: I0104 11:51:40.447424 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:51:40 crc kubenswrapper[5003]: I0104 11:51:40.496446 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:51:40 crc kubenswrapper[5003]: I0104 11:51:40.515134 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hsgvz" podStartSLOduration=4.89033544 podStartE2EDuration="1m10.515106213s" podCreationTimestamp="2026-01-04 11:50:30 +0000 UTC" firstStartedPulling="2026-01-04 11:50:31.461447621 +0000 UTC m=+146.934477462" lastFinishedPulling="2026-01-04 11:51:37.086218394 +0000 UTC m=+212.559248235" observedRunningTime="2026-01-04 11:51:39.53505848 +0000 UTC m=+215.008088321" watchObservedRunningTime="2026-01-04 11:51:40.515106213 +0000 UTC m=+215.988136084" Jan 04 11:51:41 crc kubenswrapper[5003]: I0104 11:51:41.528617 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039" exitCode=0 Jan 04 11:51:41 crc kubenswrapper[5003]: I0104 11:51:41.528706 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039"} Jan 04 11:51:42 crc kubenswrapper[5003]: I0104 11:51:42.411543 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:51:42 crc kubenswrapper[5003]: I0104 11:51:42.411975 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:51:42 crc kubenswrapper[5003]: I0104 11:51:42.453951 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:51:42 crc kubenswrapper[5003]: I0104 11:51:42.576443 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:51:42 crc kubenswrapper[5003]: I0104 11:51:42.686970 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-87zs8"] Jan 04 11:51:43 crc kubenswrapper[5003]: I0104 11:51:43.545166 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"38ae09deeffc8fbaeb90157004c63691cc52dd226000ec476514c7c6cd21089c"} Jan 04 11:51:43 crc kubenswrapper[5003]: I0104 11:51:43.547764 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxr5x" event={"ID":"b4141980-5ab0-4976-81aa-80a2245ae245","Type":"ContainerStarted","Data":"685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e"} Jan 04 11:51:43 crc kubenswrapper[5003]: I0104 11:51:43.646044 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:51:43 crc kubenswrapper[5003]: I0104 11:51:43.692080 5003 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:51:44 crc kubenswrapper[5003]: I0104 11:51:44.552847 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-87zs8" podUID="c02a9169-294b-4986-823b-e77965bc257d" containerName="registry-server" containerID="cri-o://ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc" gracePeriod=2 Jan 04 11:51:44 crc kubenswrapper[5003]: I0104 11:51:44.573043 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mxr5x" podStartSLOduration=5.616764882 podStartE2EDuration="1m15.57302183s" podCreationTimestamp="2026-01-04 11:50:29 +0000 UTC" firstStartedPulling="2026-01-04 11:50:31.442689556 +0000 UTC m=+146.915719397" lastFinishedPulling="2026-01-04 11:51:41.398946504 +0000 UTC m=+216.871976345" observedRunningTime="2026-01-04 11:51:44.571277513 +0000 UTC m=+220.044307374" watchObservedRunningTime="2026-01-04 11:51:44.57302183 +0000 UTC m=+220.046051671" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.190053 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.335783 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-catalog-content\") pod \"c02a9169-294b-4986-823b-e77965bc257d\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.335839 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-utilities\") pod \"c02a9169-294b-4986-823b-e77965bc257d\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.335968 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qv7d7\" (UniqueName: \"kubernetes.io/projected/c02a9169-294b-4986-823b-e77965bc257d-kube-api-access-qv7d7\") pod \"c02a9169-294b-4986-823b-e77965bc257d\" (UID: \"c02a9169-294b-4986-823b-e77965bc257d\") " Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.338526 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-utilities" (OuterVolumeSpecName: "utilities") pod "c02a9169-294b-4986-823b-e77965bc257d" (UID: "c02a9169-294b-4986-823b-e77965bc257d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.344219 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c02a9169-294b-4986-823b-e77965bc257d-kube-api-access-qv7d7" (OuterVolumeSpecName: "kube-api-access-qv7d7") pod "c02a9169-294b-4986-823b-e77965bc257d" (UID: "c02a9169-294b-4986-823b-e77965bc257d"). InnerVolumeSpecName "kube-api-access-qv7d7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.360644 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c02a9169-294b-4986-823b-e77965bc257d" (UID: "c02a9169-294b-4986-823b-e77965bc257d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.440112 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qv7d7\" (UniqueName: \"kubernetes.io/projected/c02a9169-294b-4986-823b-e77965bc257d-kube-api-access-qv7d7\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.440156 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.440175 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c02a9169-294b-4986-823b-e77965bc257d-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.558826 5003 generic.go:334] "Generic (PLEG): container finished" podID="c02a9169-294b-4986-823b-e77965bc257d" containerID="ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc" exitCode=0 Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.558868 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-87zs8" event={"ID":"c02a9169-294b-4986-823b-e77965bc257d","Type":"ContainerDied","Data":"ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc"} Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.558893 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-87zs8" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.558907 5003 scope.go:117] "RemoveContainer" containerID="ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.558896 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-87zs8" event={"ID":"c02a9169-294b-4986-823b-e77965bc257d","Type":"ContainerDied","Data":"d7da27be4b3233acf2b56bee12cfd53081a950c3cd2b9e050bcac0a19d467e5c"} Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.585853 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-87zs8"] Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.585944 5003 scope.go:117] "RemoveContainer" containerID="b54ab509f4f644268492504991b65d45c35fba4b4e6950ec5b9825139112dfd2" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.594568 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-87zs8"] Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.617817 5003 scope.go:117] "RemoveContainer" containerID="abd2cb511cd1288aaed0bddb31acd3b89ebb338f60ccfb59cf8e508d26742104" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.638227 5003 scope.go:117] "RemoveContainer" containerID="ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc" Jan 04 11:51:45 crc kubenswrapper[5003]: E0104 11:51:45.639071 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc\": container with ID starting with ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc not found: ID does not exist" containerID="ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.639115 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc"} err="failed to get container status \"ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc\": rpc error: code = NotFound desc = could not find container \"ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc\": container with ID starting with ad5c2523671f165250816e5e54535cdfefd9339ae1aab19c83526ee5d7003dbc not found: ID does not exist" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.639145 5003 scope.go:117] "RemoveContainer" containerID="b54ab509f4f644268492504991b65d45c35fba4b4e6950ec5b9825139112dfd2" Jan 04 11:51:45 crc kubenswrapper[5003]: E0104 11:51:45.639572 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b54ab509f4f644268492504991b65d45c35fba4b4e6950ec5b9825139112dfd2\": container with ID starting with b54ab509f4f644268492504991b65d45c35fba4b4e6950ec5b9825139112dfd2 not found: ID does not exist" containerID="b54ab509f4f644268492504991b65d45c35fba4b4e6950ec5b9825139112dfd2" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.639616 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b54ab509f4f644268492504991b65d45c35fba4b4e6950ec5b9825139112dfd2"} err="failed to get container status \"b54ab509f4f644268492504991b65d45c35fba4b4e6950ec5b9825139112dfd2\": rpc error: code = NotFound desc = could not find 
container \"b54ab509f4f644268492504991b65d45c35fba4b4e6950ec5b9825139112dfd2\": container with ID starting with b54ab509f4f644268492504991b65d45c35fba4b4e6950ec5b9825139112dfd2 not found: ID does not exist" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.639657 5003 scope.go:117] "RemoveContainer" containerID="abd2cb511cd1288aaed0bddb31acd3b89ebb338f60ccfb59cf8e508d26742104" Jan 04 11:51:45 crc kubenswrapper[5003]: E0104 11:51:45.640317 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abd2cb511cd1288aaed0bddb31acd3b89ebb338f60ccfb59cf8e508d26742104\": container with ID starting with abd2cb511cd1288aaed0bddb31acd3b89ebb338f60ccfb59cf8e508d26742104 not found: ID does not exist" containerID="abd2cb511cd1288aaed0bddb31acd3b89ebb338f60ccfb59cf8e508d26742104" Jan 04 11:51:45 crc kubenswrapper[5003]: I0104 11:51:45.640353 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abd2cb511cd1288aaed0bddb31acd3b89ebb338f60ccfb59cf8e508d26742104"} err="failed to get container status \"abd2cb511cd1288aaed0bddb31acd3b89ebb338f60ccfb59cf8e508d26742104\": rpc error: code = NotFound desc = could not find container \"abd2cb511cd1288aaed0bddb31acd3b89ebb338f60ccfb59cf8e508d26742104\": container with ID starting with abd2cb511cd1288aaed0bddb31acd3b89ebb338f60ccfb59cf8e508d26742104 not found: ID does not exist" Jan 04 11:51:46 crc kubenswrapper[5003]: I0104 11:51:46.570903 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-984zr" event={"ID":"43dd1701-8159-424a-b98b-c3dd5f2b9ad8","Type":"ContainerStarted","Data":"29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179"} Jan 04 11:51:46 crc kubenswrapper[5003]: I0104 11:51:46.573508 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ghcn" event={"ID":"cf4075fd-a261-4d15-b6e3-02d8c346fe74","Type":"ContainerStarted","Data":"ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8"} Jan 04 11:51:46 crc kubenswrapper[5003]: I0104 11:51:46.576232 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v47x4" event={"ID":"223eef6f-9ab2-48c7-9d00-c812abc17e96","Type":"ContainerStarted","Data":"edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c"} Jan 04 11:51:46 crc kubenswrapper[5003]: I0104 11:51:46.580413 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjbcx" event={"ID":"6c5933bf-668f-4062-998b-3b2c5ad3a811","Type":"ContainerStarted","Data":"1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739"} Jan 04 11:51:46 crc kubenswrapper[5003]: I0104 11:51:46.595491 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-984zr" podStartSLOduration=4.16149577 podStartE2EDuration="1m17.595462015s" podCreationTimestamp="2026-01-04 11:50:29 +0000 UTC" firstStartedPulling="2026-01-04 11:50:31.483477363 +0000 UTC m=+146.956507204" lastFinishedPulling="2026-01-04 11:51:44.917443578 +0000 UTC m=+220.390473449" observedRunningTime="2026-01-04 11:51:46.592411294 +0000 UTC m=+222.065441135" watchObservedRunningTime="2026-01-04 11:51:46.595462015 +0000 UTC m=+222.068491856" Jan 04 11:51:46 crc kubenswrapper[5003]: I0104 11:51:46.616238 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-gjbcx" podStartSLOduration=4.032480648 podStartE2EDuration="1m17.6162181s" podCreationTimestamp="2026-01-04 11:50:29 +0000 UTC" firstStartedPulling="2026-01-04 11:50:31.473373451 +0000 UTC m=+146.946403292" lastFinishedPulling="2026-01-04 11:51:45.057110903 +0000 UTC m=+220.530140744" observedRunningTime="2026-01-04 11:51:46.613200409 +0000 UTC m=+222.086230250" watchObservedRunningTime="2026-01-04 11:51:46.6162181 +0000 UTC m=+222.089247941" Jan 04 11:51:46 crc kubenswrapper[5003]: I0104 11:51:46.634246 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-v47x4" podStartSLOduration=4.602005131 podStartE2EDuration="1m13.634228942s" podCreationTimestamp="2026-01-04 11:50:33 +0000 UTC" firstStartedPulling="2026-01-04 11:50:35.896358415 +0000 UTC m=+151.369388256" lastFinishedPulling="2026-01-04 11:51:44.928582196 +0000 UTC m=+220.401612067" observedRunningTime="2026-01-04 11:51:46.632564397 +0000 UTC m=+222.105594248" watchObservedRunningTime="2026-01-04 11:51:46.634228942 +0000 UTC m=+222.107258783" Jan 04 11:51:46 crc kubenswrapper[5003]: I0104 11:51:46.813446 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c02a9169-294b-4986-823b-e77965bc257d" path="/var/lib/kubelet/pods/c02a9169-294b-4986-823b-e77965bc257d/volumes" Jan 04 11:51:49 crc kubenswrapper[5003]: I0104 11:51:49.784226 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:51:49 crc kubenswrapper[5003]: I0104 11:51:49.786149 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:51:49 crc kubenswrapper[5003]: I0104 11:51:49.845560 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:51:49 crc kubenswrapper[5003]: I0104 11:51:49.911944 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7ghcn" podStartSLOduration=8.883507257 podStartE2EDuration="1m18.911927858s" podCreationTimestamp="2026-01-04 11:50:31 +0000 UTC" firstStartedPulling="2026-01-04 11:50:34.902536599 +0000 UTC m=+150.375566440" lastFinishedPulling="2026-01-04 11:51:44.9309572 +0000 UTC m=+220.403987041" observedRunningTime="2026-01-04 11:51:46.65661924 +0000 UTC m=+222.129649081" watchObservedRunningTime="2026-01-04 11:51:49.911927858 +0000 UTC m=+225.384957699" Jan 04 11:51:49 crc kubenswrapper[5003]: I0104 11:51:49.963585 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:51:49 crc kubenswrapper[5003]: I0104 11:51:49.963650 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:51:50 crc kubenswrapper[5003]: I0104 11:51:50.007585 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:51:50 crc kubenswrapper[5003]: I0104 11:51:50.170486 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-984zr" Jan 04 11:51:50 crc kubenswrapper[5003]: I0104 11:51:50.171338 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-984zr" Jan 04 11:51:50 crc 
kubenswrapper[5003]: I0104 11:51:50.217538 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-984zr" Jan 04 11:51:50 crc kubenswrapper[5003]: I0104 11:51:50.483362 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:51:50 crc kubenswrapper[5003]: I0104 11:51:50.640719 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:51:50 crc kubenswrapper[5003]: I0104 11:51:50.641221 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:51:50 crc kubenswrapper[5003]: I0104 11:51:50.655797 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-984zr" Jan 04 11:51:51 crc kubenswrapper[5003]: I0104 11:51:51.978762 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:51:51 crc kubenswrapper[5003]: I0104 11:51:51.979117 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:51:52 crc kubenswrapper[5003]: I0104 11:51:52.032632 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:51:52 crc kubenswrapper[5003]: I0104 11:51:52.688722 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pd2px"] Jan 04 11:51:52 crc kubenswrapper[5003]: I0104 11:51:52.703404 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:51:52 crc kubenswrapper[5003]: I0104 11:51:52.881135 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-984zr"] Jan 04 11:51:53 crc kubenswrapper[5003]: I0104 11:51:53.487807 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hsgvz"] Jan 04 11:51:53 crc kubenswrapper[5003]: I0104 11:51:53.488098 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hsgvz" podUID="7aedb7cd-d9d3-441b-af72-a11878071f3f" containerName="registry-server" containerID="cri-o://8c60d451778edc3179603f57392557166aa6af054ea34f5475f2c5ebf3d33fc2" gracePeriod=2 Jan 04 11:51:53 crc kubenswrapper[5003]: I0104 11:51:53.626195 5003 generic.go:334] "Generic (PLEG): container finished" podID="7aedb7cd-d9d3-441b-af72-a11878071f3f" containerID="8c60d451778edc3179603f57392557166aa6af054ea34f5475f2c5ebf3d33fc2" exitCode=0 Jan 04 11:51:53 crc kubenswrapper[5003]: I0104 11:51:53.626446 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-984zr" podUID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" containerName="registry-server" containerID="cri-o://29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179" gracePeriod=2 Jan 04 11:51:53 crc kubenswrapper[5003]: I0104 11:51:53.626530 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsgvz" event={"ID":"7aedb7cd-d9d3-441b-af72-a11878071f3f","Type":"ContainerDied","Data":"8c60d451778edc3179603f57392557166aa6af054ea34f5475f2c5ebf3d33fc2"} Jan 04 11:51:53 
crc kubenswrapper[5003]: I0104 11:51:53.648042 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:51:53 crc kubenswrapper[5003]: I0104 11:51:53.648096 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:51:53 crc kubenswrapper[5003]: I0104 11:51:53.762374 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:51:53 crc kubenswrapper[5003]: I0104 11:51:53.913975 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.053920 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-utilities\") pod \"7aedb7cd-d9d3-441b-af72-a11878071f3f\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.054005 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c227h\" (UniqueName: \"kubernetes.io/projected/7aedb7cd-d9d3-441b-af72-a11878071f3f-kube-api-access-c227h\") pod \"7aedb7cd-d9d3-441b-af72-a11878071f3f\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.054062 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-catalog-content\") pod \"7aedb7cd-d9d3-441b-af72-a11878071f3f\" (UID: \"7aedb7cd-d9d3-441b-af72-a11878071f3f\") " Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.054874 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-utilities" (OuterVolumeSpecName: "utilities") pod "7aedb7cd-d9d3-441b-af72-a11878071f3f" (UID: "7aedb7cd-d9d3-441b-af72-a11878071f3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.060123 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7aedb7cd-d9d3-441b-af72-a11878071f3f-kube-api-access-c227h" (OuterVolumeSpecName: "kube-api-access-c227h") pod "7aedb7cd-d9d3-441b-af72-a11878071f3f" (UID: "7aedb7cd-d9d3-441b-af72-a11878071f3f"). InnerVolumeSpecName "kube-api-access-c227h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.112338 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7aedb7cd-d9d3-441b-af72-a11878071f3f" (UID: "7aedb7cd-d9d3-441b-af72-a11878071f3f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.155618 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.155661 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c227h\" (UniqueName: \"kubernetes.io/projected/7aedb7cd-d9d3-441b-af72-a11878071f3f-kube-api-access-c227h\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.155678 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7aedb7cd-d9d3-441b-af72-a11878071f3f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.589315 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-984zr" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.633991 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hsgvz" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.633985 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hsgvz" event={"ID":"7aedb7cd-d9d3-441b-af72-a11878071f3f","Type":"ContainerDied","Data":"b09d955118e43a57b263d8e510d2ae6008d29dac544ab2c9c9383aedc8717fca"} Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.634179 5003 scope.go:117] "RemoveContainer" containerID="8c60d451778edc3179603f57392557166aa6af054ea34f5475f2c5ebf3d33fc2" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.637692 5003 generic.go:334] "Generic (PLEG): container finished" podID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" containerID="29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179" exitCode=0 Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.637753 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-984zr" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.637789 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-984zr" event={"ID":"43dd1701-8159-424a-b98b-c3dd5f2b9ad8","Type":"ContainerDied","Data":"29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179"} Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.637840 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-984zr" event={"ID":"43dd1701-8159-424a-b98b-c3dd5f2b9ad8","Type":"ContainerDied","Data":"0fd5f32bb6da7798069bba37ff4ed52560fe10d867142299b1a89a09a9375782"} Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.651342 5003 scope.go:117] "RemoveContainer" containerID="118495972f58a8328051b34e1ced586241f3c598cb5d52e7ce8205cfb074b178" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.669909 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hsgvz"] Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.669974 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hsgvz"] Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.679359 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.683657 5003 scope.go:117] "RemoveContainer" containerID="ad8a0adcfae03e07cb6127b223b5dd07a3c3ccf629910df1c345e40b0d19f186" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.705213 5003 scope.go:117] "RemoveContainer" containerID="29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.720364 5003 scope.go:117] "RemoveContainer" containerID="642c66d299ea148c1cc398835b7b28a3e0159bb76c69ad5fe08c3d5e75f02081" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.736937 5003 scope.go:117] "RemoveContainer" containerID="1c5a6007e5547e4070348ab72205ec377b3baf6023c8434eb672011cc07ec99b" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.748695 5003 scope.go:117] "RemoveContainer" containerID="29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179" Jan 04 11:51:54 crc kubenswrapper[5003]: E0104 11:51:54.748998 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179\": container with ID starting with 29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179 not found: ID does not exist" containerID="29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.749050 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179"} err="failed to get container status \"29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179\": rpc error: code = NotFound desc = could not find container \"29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179\": container with ID starting with 29e484016d95ba839a183a5e14bc0c75fade1246b6d2e7d2be6345a1a1030179 not found: ID does not exist" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.749074 5003 scope.go:117] "RemoveContainer" 
containerID="642c66d299ea148c1cc398835b7b28a3e0159bb76c69ad5fe08c3d5e75f02081" Jan 04 11:51:54 crc kubenswrapper[5003]: E0104 11:51:54.749366 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"642c66d299ea148c1cc398835b7b28a3e0159bb76c69ad5fe08c3d5e75f02081\": container with ID starting with 642c66d299ea148c1cc398835b7b28a3e0159bb76c69ad5fe08c3d5e75f02081 not found: ID does not exist" containerID="642c66d299ea148c1cc398835b7b28a3e0159bb76c69ad5fe08c3d5e75f02081" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.749385 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"642c66d299ea148c1cc398835b7b28a3e0159bb76c69ad5fe08c3d5e75f02081"} err="failed to get container status \"642c66d299ea148c1cc398835b7b28a3e0159bb76c69ad5fe08c3d5e75f02081\": rpc error: code = NotFound desc = could not find container \"642c66d299ea148c1cc398835b7b28a3e0159bb76c69ad5fe08c3d5e75f02081\": container with ID starting with 642c66d299ea148c1cc398835b7b28a3e0159bb76c69ad5fe08c3d5e75f02081 not found: ID does not exist" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.749399 5003 scope.go:117] "RemoveContainer" containerID="1c5a6007e5547e4070348ab72205ec377b3baf6023c8434eb672011cc07ec99b" Jan 04 11:51:54 crc kubenswrapper[5003]: E0104 11:51:54.749606 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c5a6007e5547e4070348ab72205ec377b3baf6023c8434eb672011cc07ec99b\": container with ID starting with 1c5a6007e5547e4070348ab72205ec377b3baf6023c8434eb672011cc07ec99b not found: ID does not exist" containerID="1c5a6007e5547e4070348ab72205ec377b3baf6023c8434eb672011cc07ec99b" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.749626 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c5a6007e5547e4070348ab72205ec377b3baf6023c8434eb672011cc07ec99b"} err="failed to get container status \"1c5a6007e5547e4070348ab72205ec377b3baf6023c8434eb672011cc07ec99b\": rpc error: code = NotFound desc = could not find container \"1c5a6007e5547e4070348ab72205ec377b3baf6023c8434eb672011cc07ec99b\": container with ID starting with 1c5a6007e5547e4070348ab72205ec377b3baf6023c8434eb672011cc07ec99b not found: ID does not exist" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.772288 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcnfb\" (UniqueName: \"kubernetes.io/projected/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-kube-api-access-qcnfb\") pod \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.772577 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-utilities\") pod \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.772674 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-catalog-content\") pod \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\" (UID: \"43dd1701-8159-424a-b98b-c3dd5f2b9ad8\") " Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.773391 5003 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-utilities" (OuterVolumeSpecName: "utilities") pod "43dd1701-8159-424a-b98b-c3dd5f2b9ad8" (UID: "43dd1701-8159-424a-b98b-c3dd5f2b9ad8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.775599 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-kube-api-access-qcnfb" (OuterVolumeSpecName: "kube-api-access-qcnfb") pod "43dd1701-8159-424a-b98b-c3dd5f2b9ad8" (UID: "43dd1701-8159-424a-b98b-c3dd5f2b9ad8"). InnerVolumeSpecName "kube-api-access-qcnfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.818208 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7aedb7cd-d9d3-441b-af72-a11878071f3f" path="/var/lib/kubelet/pods/7aedb7cd-d9d3-441b-af72-a11878071f3f/volumes" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.826829 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "43dd1701-8159-424a-b98b-c3dd5f2b9ad8" (UID: "43dd1701-8159-424a-b98b-c3dd5f2b9ad8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.874803 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.874848 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.874863 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcnfb\" (UniqueName: \"kubernetes.io/projected/43dd1701-8159-424a-b98b-c3dd5f2b9ad8-kube-api-access-qcnfb\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.964934 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-984zr"] Jan 04 11:51:54 crc kubenswrapper[5003]: I0104 11:51:54.966709 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-984zr"] Jan 04 11:51:55 crc kubenswrapper[5003]: I0104 11:51:55.282531 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v47x4"] Jan 04 11:51:56 crc kubenswrapper[5003]: I0104 11:51:56.653165 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-v47x4" podUID="223eef6f-9ab2-48c7-9d00-c812abc17e96" containerName="registry-server" containerID="cri-o://edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c" gracePeriod=2 Jan 04 11:51:56 crc kubenswrapper[5003]: I0104 11:51:56.813621 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" path="/var/lib/kubelet/pods/43dd1701-8159-424a-b98b-c3dd5f2b9ad8/volumes" Jan 04 11:51:56 crc kubenswrapper[5003]: I0104 11:51:56.992867 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.105198 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-565jz\" (UniqueName: \"kubernetes.io/projected/223eef6f-9ab2-48c7-9d00-c812abc17e96-kube-api-access-565jz\") pod \"223eef6f-9ab2-48c7-9d00-c812abc17e96\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.105313 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-catalog-content\") pod \"223eef6f-9ab2-48c7-9d00-c812abc17e96\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.105367 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-utilities\") pod \"223eef6f-9ab2-48c7-9d00-c812abc17e96\" (UID: \"223eef6f-9ab2-48c7-9d00-c812abc17e96\") " Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.106632 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-utilities" (OuterVolumeSpecName: "utilities") pod "223eef6f-9ab2-48c7-9d00-c812abc17e96" (UID: "223eef6f-9ab2-48c7-9d00-c812abc17e96"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.114245 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/223eef6f-9ab2-48c7-9d00-c812abc17e96-kube-api-access-565jz" (OuterVolumeSpecName: "kube-api-access-565jz") pod "223eef6f-9ab2-48c7-9d00-c812abc17e96" (UID: "223eef6f-9ab2-48c7-9d00-c812abc17e96"). InnerVolumeSpecName "kube-api-access-565jz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.206270 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.206582 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-565jz\" (UniqueName: \"kubernetes.io/projected/223eef6f-9ab2-48c7-9d00-c812abc17e96-kube-api-access-565jz\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.216851 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "223eef6f-9ab2-48c7-9d00-c812abc17e96" (UID: "223eef6f-9ab2-48c7-9d00-c812abc17e96"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.307455 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/223eef6f-9ab2-48c7-9d00-c812abc17e96-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.661092 5003 generic.go:334] "Generic (PLEG): container finished" podID="223eef6f-9ab2-48c7-9d00-c812abc17e96" containerID="edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c" exitCode=0 Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.661138 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v47x4" event={"ID":"223eef6f-9ab2-48c7-9d00-c812abc17e96","Type":"ContainerDied","Data":"edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c"} Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.661194 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-v47x4" event={"ID":"223eef6f-9ab2-48c7-9d00-c812abc17e96","Type":"ContainerDied","Data":"96232ddbbd732c95410a895784734fda8644cda9cf1d1884d0d19cc20d9877f3"} Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.661191 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-v47x4" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.661216 5003 scope.go:117] "RemoveContainer" containerID="edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.680515 5003 scope.go:117] "RemoveContainer" containerID="5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.696889 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-v47x4"] Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.699901 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-v47x4"] Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.716242 5003 scope.go:117] "RemoveContainer" containerID="8dd70cb904827eb33a174c58d53a3d48b3470c6ad2d868cf787986b391762893" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.729370 5003 scope.go:117] "RemoveContainer" containerID="edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c" Jan 04 11:51:57 crc kubenswrapper[5003]: E0104 11:51:57.729768 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c\": container with ID starting with edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c not found: ID does not exist" containerID="edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.729808 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c"} err="failed to get container status \"edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c\": rpc error: code = NotFound desc = could not find container \"edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c\": container with ID starting with edf26be6f32001375fcb8bde9321f7e65559906f0ab80c8e763a1e8d1d4aba1c not found: ID does not exist" Jan 04 11:51:57 crc 
kubenswrapper[5003]: I0104 11:51:57.729842 5003 scope.go:117] "RemoveContainer" containerID="5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e" Jan 04 11:51:57 crc kubenswrapper[5003]: E0104 11:51:57.730159 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e\": container with ID starting with 5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e not found: ID does not exist" containerID="5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.730190 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e"} err="failed to get container status \"5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e\": rpc error: code = NotFound desc = could not find container \"5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e\": container with ID starting with 5bb2a70850a41229f0da99d41e14db4282a3b451440a7ef4d14f3513e8efae8e not found: ID does not exist" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.730215 5003 scope.go:117] "RemoveContainer" containerID="8dd70cb904827eb33a174c58d53a3d48b3470c6ad2d868cf787986b391762893" Jan 04 11:51:57 crc kubenswrapper[5003]: E0104 11:51:57.730526 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dd70cb904827eb33a174c58d53a3d48b3470c6ad2d868cf787986b391762893\": container with ID starting with 8dd70cb904827eb33a174c58d53a3d48b3470c6ad2d868cf787986b391762893 not found: ID does not exist" containerID="8dd70cb904827eb33a174c58d53a3d48b3470c6ad2d868cf787986b391762893" Jan 04 11:51:57 crc kubenswrapper[5003]: I0104 11:51:57.730558 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dd70cb904827eb33a174c58d53a3d48b3470c6ad2d868cf787986b391762893"} err="failed to get container status \"8dd70cb904827eb33a174c58d53a3d48b3470c6ad2d868cf787986b391762893\": rpc error: code = NotFound desc = could not find container \"8dd70cb904827eb33a174c58d53a3d48b3470c6ad2d868cf787986b391762893\": container with ID starting with 8dd70cb904827eb33a174c58d53a3d48b3470c6ad2d868cf787986b391762893 not found: ID does not exist" Jan 04 11:51:58 crc kubenswrapper[5003]: I0104 11:51:58.813554 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="223eef6f-9ab2-48c7-9d00-c812abc17e96" path="/var/lib/kubelet/pods/223eef6f-9ab2-48c7-9d00-c812abc17e96/volumes" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376081 5003 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376397 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376413 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376421 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aedb7cd-d9d3-441b-af72-a11878071f3f" containerName="extract-utilities" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376427 
5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aedb7cd-d9d3-441b-af72-a11878071f3f" containerName="extract-utilities" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376436 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="223eef6f-9ab2-48c7-9d00-c812abc17e96" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376441 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="223eef6f-9ab2-48c7-9d00-c812abc17e96" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376454 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aedb7cd-d9d3-441b-af72-a11878071f3f" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376459 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aedb7cd-d9d3-441b-af72-a11878071f3f" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376469 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c02a9169-294b-4986-823b-e77965bc257d" containerName="extract-utilities" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376475 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c02a9169-294b-4986-823b-e77965bc257d" containerName="extract-utilities" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376482 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="223eef6f-9ab2-48c7-9d00-c812abc17e96" containerName="extract-content" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376487 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="223eef6f-9ab2-48c7-9d00-c812abc17e96" containerName="extract-content" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376495 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" containerName="extract-utilities" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376501 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" containerName="extract-utilities" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376509 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="223eef6f-9ab2-48c7-9d00-c812abc17e96" containerName="extract-utilities" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376515 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="223eef6f-9ab2-48c7-9d00-c812abc17e96" containerName="extract-utilities" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376549 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e8da04b-cd02-4b5c-9dd8-28fd1225cb77" containerName="pruner" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376555 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e8da04b-cd02-4b5c-9dd8-28fd1225cb77" containerName="pruner" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376564 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c02a9169-294b-4986-823b-e77965bc257d" containerName="extract-content" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376571 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c02a9169-294b-4986-823b-e77965bc257d" containerName="extract-content" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376582 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c02a9169-294b-4986-823b-e77965bc257d" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 
11:51:59.376588 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c02a9169-294b-4986-823b-e77965bc257d" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376595 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aedb7cd-d9d3-441b-af72-a11878071f3f" containerName="extract-content" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376600 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aedb7cd-d9d3-441b-af72-a11878071f3f" containerName="extract-content" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.376611 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" containerName="extract-content" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376616 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" containerName="extract-content" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376713 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7aedb7cd-d9d3-441b-af72-a11878071f3f" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376724 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="223eef6f-9ab2-48c7-9d00-c812abc17e96" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376735 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e8da04b-cd02-4b5c-9dd8-28fd1225cb77" containerName="pruner" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376743 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c02a9169-294b-4986-823b-e77965bc257d" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.376751 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="43dd1701-8159-424a-b98b-c3dd5f2b9ad8" containerName="registry-server" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.377316 5003 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.377438 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.377574 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69" gracePeriod=15 Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.377932 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423" gracePeriod=15 Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.378077 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655" gracePeriod=15 Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.378139 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b" gracePeriod=15 Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.378198 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10" gracePeriod=15 Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.379954 5003 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.380161 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380183 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.380203 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380210 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.380222 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380230 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.380241 5003 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380248 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.380259 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380267 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.380279 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380287 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380405 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380423 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380434 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380444 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380454 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.380562 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380572 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.380688 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.418118 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.538544 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.538623 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.538658 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.538685 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.538705 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.538746 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.538766 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.538783 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640192 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640278 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640326 5003 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640339 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640394 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640400 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640414 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640408 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640470 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640436 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640476 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640442 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640640 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640719 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640727 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.640754 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.675559 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.676926 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.677782 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423" exitCode=0
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.677900 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10" exitCode=0
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.677980 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655" exitCode=0
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.678065 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b" exitCode=2
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.677841 5003 scope.go:117] "RemoveContainer" containerID="d9a91e66710cd178c5d99eab42f9f7b4b305856ee5712f9b1eb18c1ca961cb84"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.680148 5003 generic.go:334] "Generic (PLEG): container finished" podID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" containerID="f98bdb7ea48f6ecf6e09abf79f23be0bb216d871a971af34027a0c43fb6d502a" exitCode=0
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.680198 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a9e44ab2-84f1-4907-916e-6c04d89cc1bb","Type":"ContainerDied","Data":"f98bdb7ea48f6ecf6e09abf79f23be0bb216d871a971af34027a0c43fb6d502a"}
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.681180 5003 status_manager.go:851] "Failed to get status for pod" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.681827 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.682232 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:51:59 crc kubenswrapper[5003]: I0104 11:51:59.714074 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 04 11:51:59 crc kubenswrapper[5003]: W0104 11:51:59.728293 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-77d2fd9be3a809431c2efb92c465ae717e2dcdd6c13051fd94cae722c0e6bd2d WatchSource:0}: Error finding container 77d2fd9be3a809431c2efb92c465ae717e2dcdd6c13051fd94cae722c0e6bd2d: Status 404 returned error can't find the container with id 77d2fd9be3a809431c2efb92c465ae717e2dcdd6c13051fd94cae722c0e6bd2d
Jan 04 11:51:59 crc kubenswrapper[5003]: E0104 11:51:59.731141 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.5:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188784ea1842d5c6 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-04 11:51:59.730456006 +0000 UTC m=+235.203485847,LastTimestamp:2026-01-04 11:51:59.730456006 +0000 UTC m=+235.203485847,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.687041 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd"}
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.687499 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"77d2fd9be3a809431c2efb92c465ae717e2dcdd6c13051fd94cae722c0e6bd2d"}
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.687785 5003 status_manager.go:851] "Failed to get status for pod" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.687980 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.688475 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.690521 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.931352 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.932378 5003 status_manager.go:851] "Failed to get status for pod" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.932862 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.959212 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-var-lock\") pod \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") "
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.959265 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kube-api-access\") pod \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") "
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.959322 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kubelet-dir\") pod \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\" (UID: \"a9e44ab2-84f1-4907-916e-6c04d89cc1bb\") "
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.959635 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a9e44ab2-84f1-4907-916e-6c04d89cc1bb" (UID: "a9e44ab2-84f1-4907-916e-6c04d89cc1bb"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.959649 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-var-lock" (OuterVolumeSpecName: "var-lock") pod "a9e44ab2-84f1-4907-916e-6c04d89cc1bb" (UID: "a9e44ab2-84f1-4907-916e-6c04d89cc1bb"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:00 crc kubenswrapper[5003]: I0104 11:52:00.968205 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a9e44ab2-84f1-4907-916e-6c04d89cc1bb" (UID: "a9e44ab2-84f1-4907-916e-6c04d89cc1bb"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:52:01 crc kubenswrapper[5003]: I0104 11:52:01.060511 5003 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:01 crc kubenswrapper[5003]: I0104 11:52:01.060545 5003 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-var-lock\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:01 crc kubenswrapper[5003]: I0104 11:52:01.060557 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9e44ab2-84f1-4907-916e-6c04d89cc1bb-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:01 crc kubenswrapper[5003]: I0104 11:52:01.700430 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a9e44ab2-84f1-4907-916e-6c04d89cc1bb","Type":"ContainerDied","Data":"bba97a294c9299684d70412ceab603c9ba6932f0edbcd8f7d97d1ea2a7e99305"}
Jan 04 11:52:01 crc kubenswrapper[5003]: I0104 11:52:01.700908 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bba97a294c9299684d70412ceab603c9ba6932f0edbcd8f7d97d1ea2a7e99305"
Jan 04 11:52:01 crc kubenswrapper[5003]: I0104 11:52:01.700492 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Jan 04 11:52:01 crc kubenswrapper[5003]: I0104 11:52:01.720141 5003 status_manager.go:851] "Failed to get status for pod" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:01 crc kubenswrapper[5003]: I0104 11:52:01.720399 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.625711 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.627761 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.628294 5003 status_manager.go:851] "Failed to get status for pod" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.628524 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.629341 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.715060 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.715893 5003 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69" exitCode=0
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.715954 5003 scope.go:117] "RemoveContainer" containerID="69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.715993 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.731233 5003 scope.go:117] "RemoveContainer" containerID="d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.743595 5003 scope.go:117] "RemoveContainer" containerID="b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.754914 5003 scope.go:117] "RemoveContainer" containerID="553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.767121 5003 scope.go:117] "RemoveContainer" containerID="0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.780992 5003 scope.go:117] "RemoveContainer" containerID="5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.798077 5003 scope.go:117] "RemoveContainer" containerID="69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423"
Jan 04 11:52:03 crc kubenswrapper[5003]: E0104 11:52:03.798464 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\": container with ID starting with 69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423 not found: ID does not exist" containerID="69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.798504 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423"} err="failed to get container status \"69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\": rpc error: code = NotFound desc = could not find container \"69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423\": container with ID starting with 69eba9e671cd35de34b5510c6e349709fa85ad61b53eb4363919afaadc511423 not found: ID does not exist"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.798529 5003 scope.go:117] "RemoveContainer" containerID="d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10"
Jan 04 11:52:03 crc kubenswrapper[5003]: E0104 11:52:03.798890 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\": container with ID starting with d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10 not found: ID does not exist" containerID="d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.798963 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10"} err="failed to get container status \"d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\": rpc error: code = NotFound desc = could not find container \"d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10\": container with ID starting with d55f6d71e1daadeb66c40993230b7299f841acfb8f8de0a1a22a2728629eee10 not found: ID does not exist"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.799053 5003 scope.go:117] "RemoveContainer" containerID="b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655"
Jan 04 11:52:03 crc kubenswrapper[5003]: E0104 11:52:03.799341 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\": container with ID starting with b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655 not found: ID does not exist" containerID="b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.799368 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655"} err="failed to get container status \"b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\": rpc error: code = NotFound desc = could not find container \"b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655\": container with ID starting with b7b124edc217466db9f93b80d52192ca3cbbfd3c9f6d2b2cd7e0fb2988b10655 not found: ID does not exist"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.799384 5003 scope.go:117] "RemoveContainer" containerID="553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b"
Jan 04 11:52:03 crc kubenswrapper[5003]: E0104 11:52:03.799600 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\": container with ID starting with 553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b not found: ID does not exist" containerID="553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.799660 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b"} err="failed to get container status \"553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\": rpc error: code = NotFound desc = could not find container \"553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b\": container with ID starting with 553239744fcc820cecde10a3a52520d1ccf1743a3f78d03630334ca0c00dfb8b not found: ID does not exist"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.799694 5003 scope.go:117] "RemoveContainer" containerID="0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69"
Jan 04 11:52:03 crc kubenswrapper[5003]: E0104 11:52:03.799939 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\": container with ID starting with 0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69 not found: ID does not exist" containerID="0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.799963 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69"} err="failed to get container status \"0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\": rpc error: code = NotFound desc = could not find container \"0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69\": container with ID starting with 0f27cd4998727a3002c50144e56138c772f6c686ad3bb9266c473d9b9cf3df69 not found: ID does not exist"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.799980 5003 scope.go:117] "RemoveContainer" containerID="5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2"
Jan 04 11:52:03 crc kubenswrapper[5003]: E0104 11:52:03.800207 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\": container with ID starting with 5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2 not found: ID does not exist" containerID="5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.800233 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2"} err="failed to get container status \"5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\": rpc error: code = NotFound desc = could not find container \"5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2\": container with ID starting with 5f6059a71a4fd6906232e36afae43af69e316e1d28e06ee6abf38989a830fbb2 not found: ID does not exist"
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.803909 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.803964 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.803979 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.804049 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.804087 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.804103 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.804296 5003 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.804312 5003 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:03 crc kubenswrapper[5003]: I0104 11:52:03.804321 5003 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:04 crc kubenswrapper[5003]: I0104 11:52:04.036048 5003 status_manager.go:851] "Failed to get status for pod" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:04 crc kubenswrapper[5003]: I0104 11:52:04.036437 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:04 crc kubenswrapper[5003]: I0104 11:52:04.036884 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:04 crc kubenswrapper[5003]: I0104 11:52:04.810560 5003 status_manager.go:851] "Failed to get status for pod" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:04 crc kubenswrapper[5003]: I0104 11:52:04.811199 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:04 crc kubenswrapper[5003]: I0104 11:52:04.811690 5003 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:04 crc kubenswrapper[5003]: I0104 11:52:04.835095 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes"
Jan 04 11:52:07 crc kubenswrapper[5003]: E0104 11:52:07.925874 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:07 crc kubenswrapper[5003]: E0104 11:52:07.927028 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:07 crc kubenswrapper[5003]: E0104 11:52:07.927399 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:07 crc kubenswrapper[5003]: E0104 11:52:07.927788 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:07 crc kubenswrapper[5003]: E0104 11:52:07.928309 5003 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:07 crc kubenswrapper[5003]: I0104 11:52:07.928359 5003 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Jan 04 11:52:07 crc kubenswrapper[5003]: E0104 11:52:07.928743 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused" interval="200ms"
Jan 04 11:52:08 crc kubenswrapper[5003]: E0104 11:52:08.130545 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused" interval="400ms"
Jan 04 11:52:08 crc kubenswrapper[5003]: E0104 11:52:08.531769 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused" interval="800ms"
Jan 04 11:52:09 crc kubenswrapper[5003]: E0104 11:52:09.312903 5003 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.5:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188784ea1842d5c6 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-04 11:51:59.730456006 +0000 UTC m=+235.203485847,LastTimestamp:2026-01-04 11:51:59.730456006 +0000 UTC m=+235.203485847,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Jan 04 11:52:09 crc kubenswrapper[5003]: E0104 11:52:09.333421 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused" interval="1.6s"
Jan 04 11:52:10 crc kubenswrapper[5003]: E0104 11:52:10.935039 5003 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.5:6443: connect: connection refused" interval="3.2s"
Jan 04 11:52:11 crc kubenswrapper[5003]: I0104 11:52:11.806082 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:11 crc kubenswrapper[5003]: I0104 11:52:11.808488 5003 status_manager.go:851] "Failed to get status for pod" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:11 crc kubenswrapper[5003]: I0104 11:52:11.808880 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:11 crc kubenswrapper[5003]: I0104 11:52:11.825439 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ed41329b-30c2-4489-9315-534f0431252c"
Jan 04 11:52:11 crc kubenswrapper[5003]: I0104 11:52:11.825503 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ed41329b-30c2-4489-9315-534f0431252c"
Jan 04 11:52:11 crc kubenswrapper[5003]: E0104 11:52:11.826178 5003 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.5:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:11 crc kubenswrapper[5003]: I0104 11:52:11.826814 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.782975 5003 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="c5b2cdc0d08965307b4f4ab4a9fa7f020d91c1f6d0171dcd5cf86e8c6ca544d0" exitCode=0
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.783094 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"c5b2cdc0d08965307b4f4ab4a9fa7f020d91c1f6d0171dcd5cf86e8c6ca544d0"}
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.783499 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"460a1abb58a6665454b7af803e48b7651137b71948f79e70d49df9cdc270799e"}
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.783798 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ed41329b-30c2-4489-9315-534f0431252c"
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.783816 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ed41329b-30c2-4489-9315-534f0431252c"
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.784336 5003 status_manager.go:851] "Failed to get status for pod" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:12 crc kubenswrapper[5003]: E0104 11:52:12.784387 5003 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.5:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.784644 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.786401 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.786440 5003 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d" exitCode=1
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.786460 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d"}
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.786718 5003 scope.go:117] "RemoveContainer" containerID="e2dbcc12ad7fd10e6cb4a57dcb7439ab17f2e2a8baad9e2ad6f8fc86abf7017d"
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.787751 5003 status_manager.go:851] "Failed to get status for pod" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.788103 5003 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:12 crc kubenswrapper[5003]: I0104 11:52:12.788395 5003 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.5:6443: connect: connection refused"
Jan 04 11:52:13 crc kubenswrapper[5003]: I0104 11:52:13.800722 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Jan 04 11:52:13 crc kubenswrapper[5003]: I0104 11:52:13.801166 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"580a093b1e7766c0cafcb8ea9caf4fdbfe0643c1a85ee77ad17d32fc6be4b6f2"}
Jan 04 11:52:13 crc kubenswrapper[5003]: I0104 11:52:13.812430 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c83d9069b2ff44b87431486ac028466394796d0d7f07182d89b52aa344bd2f63"}
Jan 04 11:52:13 crc kubenswrapper[5003]: I0104 11:52:13.812473 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"515e19a2fef7e98937fa3af8e3d26bf103114405ea32d3fb0b43e5236e3b67a1"}
Jan 04 11:52:13 crc kubenswrapper[5003]: I0104 11:52:13.812486 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"77b2642e90bb52749746916beb8ef7b6e0421ff5b5d9da4f037970f2dbd8c1d5"}
Jan 04 11:52:13 crc kubenswrapper[5003]: I0104 11:52:13.812495 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a74f87150495f83a6d833fb2d0bd9d0e4e1b093bfda0bf1ea901deba0571781c"}
Jan 04 11:52:14 crc kubenswrapper[5003]: I0104 11:52:14.819466 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"38f8df8421410899fc9ed7728f1c626d5728da8a8557449e9e701500952e04df"}
Jan 04 11:52:14 crc kubenswrapper[5003]: I0104 11:52:14.819925 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ed41329b-30c2-4489-9315-534f0431252c"
Jan 04 11:52:14 crc kubenswrapper[5003]: I0104 11:52:14.819942 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ed41329b-30c2-4489-9315-534f0431252c"
Jan 04 11:52:14 crc kubenswrapper[5003]: I0104 11:52:14.819991 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:15 crc kubenswrapper[5003]: I0104 11:52:15.687305 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:52:16 crc kubenswrapper[5003]: I0104 11:52:16.828253 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:16 crc kubenswrapper[5003]: I0104 11:52:16.828305 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:16 crc kubenswrapper[5003]: I0104 11:52:16.833295 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:17 crc kubenswrapper[5003]: I0104 11:52:17.730003 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" podUID="508261e1-05b6-486d-9724-768d8729d7dd" containerName="oauth-openshift" containerID="cri-o://37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e" gracePeriod=15
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.182778 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px"
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.310979 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-router-certs\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311035 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-audit-policies\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311053 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-ocp-branding-template\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311079 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/508261e1-05b6-486d-9724-768d8729d7dd-audit-dir\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311131 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bv66\" (UniqueName: \"kubernetes.io/projected/508261e1-05b6-486d-9724-768d8729d7dd-kube-api-access-8bv66\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311155 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-provider-selection\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311174 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-session\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311239 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-serving-cert\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311236 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/508261e1-05b6-486d-9724-768d8729d7dd-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311274 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-error\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311301 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-cliconfig\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311321 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-trusted-ca-bundle\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311342 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-idp-0-file-data\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311357 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-service-ca\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311373 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-login\") pod \"508261e1-05b6-486d-9724-768d8729d7dd\" (UID: \"508261e1-05b6-486d-9724-768d8729d7dd\") "
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.311564 5003 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/508261e1-05b6-486d-9724-768d8729d7dd-audit-dir\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.312101 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.312861 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.312889 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.313007 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.316599 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.317111 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.318186 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.325389 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.325470 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/508261e1-05b6-486d-9724-768d8729d7dd-kube-api-access-8bv66" (OuterVolumeSpecName: "kube-api-access-8bv66") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "kube-api-access-8bv66". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.325991 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.327331 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.327460 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.327620 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "508261e1-05b6-486d-9724-768d8729d7dd" (UID: "508261e1-05b6-486d-9724-768d8729d7dd"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.413818 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.413883 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.413907 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.413929 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.413949 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.413968 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.413991 5003 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/508261e1-05b6-486d-9724-768d8729d7dd-audit-policies\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.414039 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.414082 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bv66\" (UniqueName: \"kubernetes.io/projected/508261e1-05b6-486d-9724-768d8729d7dd-kube-api-access-8bv66\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.414102 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.414124 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.414143 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.414161 5003 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/508261e1-05b6-486d-9724-768d8729d7dd-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.840447 5003 generic.go:334] "Generic (PLEG): container finished" podID="508261e1-05b6-486d-9724-768d8729d7dd" containerID="37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e" exitCode=0
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.840514 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px"
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.840530 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" event={"ID":"508261e1-05b6-486d-9724-768d8729d7dd","Type":"ContainerDied","Data":"37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e"}
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.840567 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-pd2px" event={"ID":"508261e1-05b6-486d-9724-768d8729d7dd","Type":"ContainerDied","Data":"9f43035db60d1592fe68106ea26bc027087b12fa928687cab121ce1b4018d9bb"}
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.840586 5003 scope.go:117] "RemoveContainer" containerID="37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e"
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.861527 5003 scope.go:117] "RemoveContainer" containerID="37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e"
Jan 04 11:52:18 crc kubenswrapper[5003]: E0104 11:52:18.861952 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e\": container with ID starting with 37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e not found: ID does not exist" containerID="37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e"
Jan 04 11:52:18 crc kubenswrapper[5003]: I0104 11:52:18.862002 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e"} err="failed to get container status \"37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e\": rpc error: code = NotFound desc = could not find container \"37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e\": container with ID starting with 37289b816f6f1b0c3420348bebb2c4f3f0ca56eefc3759e4fc4457cc73cf0e4e not found: ID does not exist"
Jan 04 11:52:19 crc kubenswrapper[5003]: I0104 11:52:19.827382 5003 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:19 crc kubenswrapper[5003]: I0104 11:52:19.846801 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ed41329b-30c2-4489-9315-534f0431252c"
Jan 04 11:52:19 crc kubenswrapper[5003]: I0104 11:52:19.846839 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ed41329b-30c2-4489-9315-534f0431252c"
Jan 04 11:52:19 crc kubenswrapper[5003]: I0104 11:52:19.851325 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 04 11:52:19 crc kubenswrapper[5003]: I0104 11:52:19.854256 5003 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="b053bb1d-cd98-4d8e-a514-89c1cababfdd"
Jan 04 11:52:20 crc kubenswrapper[5003]: E0104 11:52:20.572977 5003 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"oauth-openshift-dockercfg-znhcc\": Failed to watch *v1.Secret: unknown (get secrets)" logger="UnhandledError"
Jan 04 11:52:20 crc kubenswrapper[5003]: E0104 11:52:20.607537 5003 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-trusted-ca-bundle\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError"
Jan 04 11:52:20 crc kubenswrapper[5003]: I0104 11:52:20.850703 5003 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ed41329b-30c2-4489-9315-534f0431252c"
Jan 04 11:52:20 crc kubenswrapper[5003]: I0104 11:52:20.850736 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ed41329b-30c2-4489-9315-534f0431252c"
Jan 04 11:52:21 crc kubenswrapper[5003]: I0104 11:52:21.926539 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:52:21 crc kubenswrapper[5003]: I0104 11:52:21.931689 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:52:24 crc kubenswrapper[5003]: I0104 11:52:24.824849 5003 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="b053bb1d-cd98-4d8e-a514-89c1cababfdd"
Jan 04 11:52:25 crc kubenswrapper[5003]: I0104 11:52:25.696342 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 04 11:52:28 crc kubenswrapper[5003]: I0104 11:52:28.952740 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Jan 04 11:52:31 crc kubenswrapper[5003]: I0104 11:52:31.473507 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Jan 04 11:52:31 crc kubenswrapper[5003]: I0104 11:52:31.795617 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Jan 04 11:52:32 crc kubenswrapper[5003]: I0104 11:52:32.337265 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Jan 04 11:52:32 crc kubenswrapper[5003]: I0104 11:52:32.468233 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 04 11:52:32 crc kubenswrapper[5003]: I0104 11:52:32.522953 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Jan 04 11:52:32 crc kubenswrapper[5003]: I0104 11:52:32.637694 5003 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 04 11:52:32 crc kubenswrapper[5003]: I0104 11:52:32.652666 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 04 11:52:32 crc kubenswrapper[5003]: I0104 11:52:32.849928 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 04 11:52:32 crc kubenswrapper[5003]: I0104 11:52:32.952464 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 04 11:52:32 crc kubenswrapper[5003]: I0104 11:52:32.978439 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.062618 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.082901 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.136876 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.172656 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.298851 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.327227 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.473598 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.661040 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.733263 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.817755 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.817980 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.880521 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.941731 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 04 11:52:33 crc kubenswrapper[5003]: I0104 11:52:33.947304 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 04 11:52:34 crc 
kubenswrapper[5003]: I0104 11:52:34.033451 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.105133 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.234335 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.250625 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.350378 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.376705 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.387507 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.571346 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.640662 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.657217 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.710912 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.720931 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.727754 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.742622 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.783834 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 04 11:52:34 crc kubenswrapper[5003]: I0104 11:52:34.938864 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.005116 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.053552 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.068045 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 04 11:52:35 crc 
kubenswrapper[5003]: I0104 11:52:35.217309 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.373248 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.374272 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.465843 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.674442 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.683078 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.771243 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.786915 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.787355 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.823142 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.828694 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 04 11:52:35 crc kubenswrapper[5003]: I0104 11:52:35.963303 5003 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.086368 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.129322 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.150366 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.168364 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.219067 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.267597 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.404735 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 
04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.524089 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.612526 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.700000 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.883247 5003 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.917501 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 04 11:52:36 crc kubenswrapper[5003]: I0104 11:52:36.955429 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 04 11:52:37 crc kubenswrapper[5003]: I0104 11:52:37.250691 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 04 11:52:37 crc kubenswrapper[5003]: I0104 11:52:37.371425 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 04 11:52:37 crc kubenswrapper[5003]: I0104 11:52:37.470131 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 04 11:52:37 crc kubenswrapper[5003]: I0104 11:52:37.479311 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 04 11:52:37 crc kubenswrapper[5003]: I0104 11:52:37.606992 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 04 11:52:37 crc kubenswrapper[5003]: I0104 11:52:37.669834 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 04 11:52:37 crc kubenswrapper[5003]: I0104 11:52:37.695952 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 04 11:52:37 crc kubenswrapper[5003]: I0104 11:52:37.988129 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.006705 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.040666 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.071580 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.121298 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.237958 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.332030 5003 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.354368 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.486494 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.504163 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.541615 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.552450 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.625786 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.681093 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.713661 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.790356 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.871071 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 04 11:52:38 crc kubenswrapper[5003]: I0104 11:52:38.910508 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.013173 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.050443 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.172211 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.245251 5003 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.342857 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.539021 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.577373 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.626790 5003 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.707830 5003 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.730444 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.744479 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.796756 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.797500 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.943118 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 04 11:52:39 crc kubenswrapper[5003]: I0104 11:52:39.973454 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.034312 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.084602 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.087282 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.181697 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.242164 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.252905 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.284118 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.308485 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.326505 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.336557 5003 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.337230 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=41.337214816 
podStartE2EDuration="41.337214816s" podCreationTimestamp="2026-01-04 11:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:52:19.612264677 +0000 UTC m=+255.085294518" watchObservedRunningTime="2026-01-04 11:52:40.337214816 +0000 UTC m=+275.810244667" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.341980 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-pd2px","openshift-kube-apiserver/kube-apiserver-crc"] Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.342071 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.345064 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.346971 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.376113 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=21.376089397 podStartE2EDuration="21.376089397s" podCreationTimestamp="2026-01-04 11:52:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:52:40.372323265 +0000 UTC m=+275.845353126" watchObservedRunningTime="2026-01-04 11:52:40.376089397 +0000 UTC m=+275.849119238" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.439181 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.462040 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.589672 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.602955 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.619117 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.627957 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.716869 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.764531 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.786412 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.807910 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" 
Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.830511 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="508261e1-05b6-486d-9724-768d8729d7dd" path="/var/lib/kubelet/pods/508261e1-05b6-486d-9724-768d8729d7dd/volumes" Jan 04 11:52:40 crc kubenswrapper[5003]: I0104 11:52:40.921487 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.020402 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.055391 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.067206 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.099547 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.125848 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.127901 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.141719 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.154891 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.251726 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.298212 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.329994 5003 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.405780 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.425696 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.457349 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.521569 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.555908 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.600454 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 04 11:52:41 crc 
kubenswrapper[5003]: I0104 11:52:41.608511 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.621244 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.628721 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.651256 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.678292 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.760192 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.802531 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.810830 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.812237 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.835340 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.964864 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 04 11:52:41 crc kubenswrapper[5003]: I0104 11:52:41.985067 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.123934 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.225613 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.287856 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.314507 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.376391 5003 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.377590 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd" gracePeriod=5 Jan 04 
11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.435551 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.446711 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.471257 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.482006 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.495678 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.515608 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.580587 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.632399 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.753556 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.783710 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.833567 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.855155 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.855421 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.861458 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 04 11:52:42 crc kubenswrapper[5003]: I0104 11:52:42.872244 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:42.999601 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.202885 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.272891 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.285710 5003 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"trusted-ca-bundle" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.364240 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.396593 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.532239 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.659104 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.692478 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.724518 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.795361 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.821506 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.861295 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 04 11:52:43 crc kubenswrapper[5003]: I0104 11:52:43.976567 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.056480 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.094112 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.098184 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.153068 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.223107 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.226196 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.231810 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.263815 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.270791 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 04 
11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.363120 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.394680 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.447754 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.525559 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.601142 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.608847 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.621964 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.740592 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.833631 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 04 11:52:44 crc kubenswrapper[5003]: I0104 11:52:44.911391 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.009714 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.028316 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.099800 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.174577 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.254654 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.350787 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.367861 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.459364 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.492623 5003 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.553329 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.715108 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.772358 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.809582 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 04 11:52:45 crc kubenswrapper[5003]: I0104 11:52:45.881757 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 04 11:52:46 crc kubenswrapper[5003]: I0104 11:52:46.094435 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 04 11:52:46 crc kubenswrapper[5003]: I0104 11:52:46.359083 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 04 11:52:46 crc kubenswrapper[5003]: I0104 11:52:46.400906 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 04 11:52:46 crc kubenswrapper[5003]: I0104 11:52:46.736510 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 04 11:52:46 crc kubenswrapper[5003]: I0104 11:52:46.755604 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 04 11:52:46 crc kubenswrapper[5003]: I0104 11:52:46.766931 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 04 11:52:46 crc kubenswrapper[5003]: I0104 11:52:46.874349 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.239487 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.513342 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.513835 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545223 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545271 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545324 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545349 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545373 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545420 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545420 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545504 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545484 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545823 5003 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545847 5003 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545860 5003 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.545873 5003 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.554888 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.647472 5003 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 04 11:52:47 crc kubenswrapper[5003]: I0104 11:52:47.843448 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.036193 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.036242 5003 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd" exitCode=137 Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.036310 5003 scope.go:117] "RemoveContainer" containerID="8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd" Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.036430 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.061095 5003 scope.go:117] "RemoveContainer" containerID="8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd" Jan 04 11:52:48 crc kubenswrapper[5003]: E0104 11:52:48.061938 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd\": container with ID starting with 8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd not found: ID does not exist" containerID="8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd" Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.061992 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd"} err="failed to get container status \"8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd\": rpc error: code = NotFound desc = could not find container \"8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd\": container with ID starting with 8447ab6e523f09640dda665fc0815c20d1eedbda20860a4681d59ccb1eb817bd not found: ID does not exist" Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.691260 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.819721 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.820235 5003 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.834956 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.835010 5003 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="b6b501ed-3028-4288-91fa-d80ee8c9b7fc" Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.841929 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.841990 5003 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="b6b501ed-3028-4288-91fa-d80ee8c9b7fc" Jan 04 11:52:48 crc kubenswrapper[5003]: I0104 11:52:48.985684 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.175242 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-c4f645f59-jq94v"] Jan 04 11:52:52 crc kubenswrapper[5003]: E0104 11:52:52.175448 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="508261e1-05b6-486d-9724-768d8729d7dd" containerName="oauth-openshift" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.175462 5003 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="508261e1-05b6-486d-9724-768d8729d7dd" containerName="oauth-openshift" Jan 04 11:52:52 crc kubenswrapper[5003]: E0104 11:52:52.175480 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.175487 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 04 11:52:52 crc kubenswrapper[5003]: E0104 11:52:52.175499 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" containerName="installer" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.175504 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" containerName="installer" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.175584 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="508261e1-05b6-486d-9724-768d8729d7dd" containerName="oauth-openshift" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.175594 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.175607 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9e44ab2-84f1-4907-916e-6c04d89cc1bb" containerName="installer" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.175941 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.178763 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.180789 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.181092 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.181328 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.181486 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.181623 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.181902 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.182079 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.182634 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.182786 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 04 11:52:52 crc 
kubenswrapper[5003]: I0104 11:52:52.183058 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.184238 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.212610 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.213432 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-session\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.213478 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-audit-policies\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.213573 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.213622 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.213700 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v" Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.213747 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-router-certs\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v" 
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.213783 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.213812 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-service-ca\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.213840 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.213966 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-template-error\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.214027 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55x5q\" (UniqueName: \"kubernetes.io/projected/c14fc14c-af29-4e81-958e-ccfdf8c40078-kube-api-access-55x5q\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.214055 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.214092 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c14fc14c-af29-4e81-958e-ccfdf8c40078-audit-dir\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.214125 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-template-login\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.214522 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-c4f645f59-jq94v"]
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.215826 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.217958 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.316478 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.316584 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-template-error\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.316635 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55x5q\" (UniqueName: \"kubernetes.io/projected/c14fc14c-af29-4e81-958e-ccfdf8c40078-kube-api-access-55x5q\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.316693 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c14fc14c-af29-4e81-958e-ccfdf8c40078-audit-dir\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.316732 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-template-login\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.316783 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.316832 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-session\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.316878 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-audit-policies\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.316872 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c14fc14c-af29-4e81-958e-ccfdf8c40078-audit-dir\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.316944 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.316993 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.317077 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.317135 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-router-certs\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.317177 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.317219 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-service-ca\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.317896 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-audit-policies\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.318319 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.318589 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-cliconfig\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.318587 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-service-ca\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.323957 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-template-error\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.324164 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-router-certs\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.324664 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-serving-cert\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.325508 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-session\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.325600 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.325982 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.326529 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.327564 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c14fc14c-af29-4e81-958e-ccfdf8c40078-v4-0-config-user-template-login\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.335254 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55x5q\" (UniqueName: \"kubernetes.io/projected/c14fc14c-af29-4e81-958e-ccfdf8c40078-kube-api-access-55x5q\") pod \"oauth-openshift-c4f645f59-jq94v\" (UID: \"c14fc14c-af29-4e81-958e-ccfdf8c40078\") " pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.511707 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:52 crc kubenswrapper[5003]: I0104 11:52:52.954972 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-c4f645f59-jq94v"]
Jan 04 11:52:53 crc kubenswrapper[5003]: I0104 11:52:53.069369 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v" event={"ID":"c14fc14c-af29-4e81-958e-ccfdf8c40078","Type":"ContainerStarted","Data":"c0efe5ba54198e5608cd5b8bcf00cb8f6d66e07ed061f82c0996618078ae7565"}
Jan 04 11:52:54 crc kubenswrapper[5003]: I0104 11:52:54.078974 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v" event={"ID":"c14fc14c-af29-4e81-958e-ccfdf8c40078","Type":"ContainerStarted","Data":"55e826224b262459d54e9db3e4ffb7819d3277ed4b50173c1431d29e953af112"}
Jan 04 11:52:54 crc kubenswrapper[5003]: I0104 11:52:54.079413 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:54 crc kubenswrapper[5003]: I0104 11:52:54.084562 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v"
Jan 04 11:52:54 crc kubenswrapper[5003]: I0104 11:52:54.106635 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-c4f645f59-jq94v" podStartSLOduration=62.106610365 podStartE2EDuration="1m2.106610365s" podCreationTimestamp="2026-01-04 11:51:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:52:54.10236419 +0000 UTC m=+289.575394051" watchObservedRunningTime="2026-01-04 11:52:54.106610365 +0000 UTC m=+289.579640246"
Jan 04 11:53:02 crc kubenswrapper[5003]: I0104 11:53:02.134085 5003 generic.go:334] "Generic (PLEG): container finished" podID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" containerID="eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258" exitCode=0
Jan 04 11:53:02 crc kubenswrapper[5003]: I0104 11:53:02.134259 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" event={"ID":"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1","Type":"ContainerDied","Data":"eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258"}
Jan 04 11:53:02 crc kubenswrapper[5003]: I0104 11:53:02.137363 5003 scope.go:117] "RemoveContainer" containerID="eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258"
Jan 04 11:53:03 crc kubenswrapper[5003]: I0104 11:53:03.146674 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" event={"ID":"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1","Type":"ContainerStarted","Data":"5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f"}
Jan 04 11:53:03 crc kubenswrapper[5003]: I0104 11:53:03.148495 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl"
Jan 04 11:53:03 crc kubenswrapper[5003]: I0104 11:53:03.154293 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl"
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.021768 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tk9gz"]
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.022581 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" podUID="8006abe7-31d8-489d-9005-d96d40bb9ba5" containerName="controller-manager" containerID="cri-o://04fe15c1cd00c4072d285d953ee622494d0c7d2d2f1d7202f33d4e3526178996" gracePeriod=30
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.120504 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9"]
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.120735 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" podUID="40dada5c-5a67-4362-b9fb-e49a7fc32307" containerName="route-controller-manager" containerID="cri-o://b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2" gracePeriod=30
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.154583 5003 generic.go:334] "Generic (PLEG): container finished" podID="8006abe7-31d8-489d-9005-d96d40bb9ba5" containerID="04fe15c1cd00c4072d285d953ee622494d0c7d2d2f1d7202f33d4e3526178996" exitCode=0
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.154717 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" event={"ID":"8006abe7-31d8-489d-9005-d96d40bb9ba5","Type":"ContainerDied","Data":"04fe15c1cd00c4072d285d953ee622494d0c7d2d2f1d7202f33d4e3526178996"}
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.476913 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz"
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.510874 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9"
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.598555 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8006abe7-31d8-489d-9005-d96d40bb9ba5-serving-cert\") pod \"8006abe7-31d8-489d-9005-d96d40bb9ba5\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") "
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.599184 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-config\") pod \"40dada5c-5a67-4362-b9fb-e49a7fc32307\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") "
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.599257 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-client-ca\") pod \"8006abe7-31d8-489d-9005-d96d40bb9ba5\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") "
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.600270 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-client-ca" (OuterVolumeSpecName: "client-ca") pod "8006abe7-31d8-489d-9005-d96d40bb9ba5" (UID: "8006abe7-31d8-489d-9005-d96d40bb9ba5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.600365 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-proxy-ca-bundles\") pod \"8006abe7-31d8-489d-9005-d96d40bb9ba5\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") "
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.600388 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40dada5c-5a67-4362-b9fb-e49a7fc32307-serving-cert\") pod \"40dada5c-5a67-4362-b9fb-e49a7fc32307\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") "
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.600418 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-config" (OuterVolumeSpecName: "config") pod "40dada5c-5a67-4362-b9fb-e49a7fc32307" (UID: "40dada5c-5a67-4362-b9fb-e49a7fc32307"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.600921 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8006abe7-31d8-489d-9005-d96d40bb9ba5" (UID: "8006abe7-31d8-489d-9005-d96d40bb9ba5"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.601050 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55q26\" (UniqueName: \"kubernetes.io/projected/8006abe7-31d8-489d-9005-d96d40bb9ba5-kube-api-access-55q26\") pod \"8006abe7-31d8-489d-9005-d96d40bb9ba5\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") "
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.601451 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2w5q\" (UniqueName: \"kubernetes.io/projected/40dada5c-5a67-4362-b9fb-e49a7fc32307-kube-api-access-g2w5q\") pod \"40dada5c-5a67-4362-b9fb-e49a7fc32307\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") "
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.601511 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-client-ca\") pod \"40dada5c-5a67-4362-b9fb-e49a7fc32307\" (UID: \"40dada5c-5a67-4362-b9fb-e49a7fc32307\") "
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.601541 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-config\") pod \"8006abe7-31d8-489d-9005-d96d40bb9ba5\" (UID: \"8006abe7-31d8-489d-9005-d96d40bb9ba5\") "
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.601853 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.601874 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-client-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.601886 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.602351 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-client-ca" (OuterVolumeSpecName: "client-ca") pod "40dada5c-5a67-4362-b9fb-e49a7fc32307" (UID: "40dada5c-5a67-4362-b9fb-e49a7fc32307"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.602627 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-config" (OuterVolumeSpecName: "config") pod "8006abe7-31d8-489d-9005-d96d40bb9ba5" (UID: "8006abe7-31d8-489d-9005-d96d40bb9ba5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.605197 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8006abe7-31d8-489d-9005-d96d40bb9ba5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8006abe7-31d8-489d-9005-d96d40bb9ba5" (UID: "8006abe7-31d8-489d-9005-d96d40bb9ba5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.605407 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40dada5c-5a67-4362-b9fb-e49a7fc32307-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "40dada5c-5a67-4362-b9fb-e49a7fc32307" (UID: "40dada5c-5a67-4362-b9fb-e49a7fc32307"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.605591 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40dada5c-5a67-4362-b9fb-e49a7fc32307-kube-api-access-g2w5q" (OuterVolumeSpecName: "kube-api-access-g2w5q") pod "40dada5c-5a67-4362-b9fb-e49a7fc32307" (UID: "40dada5c-5a67-4362-b9fb-e49a7fc32307"). InnerVolumeSpecName "kube-api-access-g2w5q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.605630 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8006abe7-31d8-489d-9005-d96d40bb9ba5-kube-api-access-55q26" (OuterVolumeSpecName: "kube-api-access-55q26") pod "8006abe7-31d8-489d-9005-d96d40bb9ba5" (UID: "8006abe7-31d8-489d-9005-d96d40bb9ba5"). InnerVolumeSpecName "kube-api-access-55q26". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.686526 5003 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.703316 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55q26\" (UniqueName: \"kubernetes.io/projected/8006abe7-31d8-489d-9005-d96d40bb9ba5-kube-api-access-55q26\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.703368 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2w5q\" (UniqueName: \"kubernetes.io/projected/40dada5c-5a67-4362-b9fb-e49a7fc32307-kube-api-access-g2w5q\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.703390 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/40dada5c-5a67-4362-b9fb-e49a7fc32307-client-ca\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.703411 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8006abe7-31d8-489d-9005-d96d40bb9ba5-config\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.703435 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8006abe7-31d8-489d-9005-d96d40bb9ba5-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:04 crc kubenswrapper[5003]: I0104 11:53:04.703452 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40dada5c-5a67-4362-b9fb-e49a7fc32307-serving-cert\") on node \"crc\" DevicePath \"\""
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.165492 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz" event={"ID":"8006abe7-31d8-489d-9005-d96d40bb9ba5","Type":"ContainerDied","Data":"7f8437beccc07bc5d223da57b86f240f2a510a4dbe614c1026e139c9bbc085e0"}
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.165621 5003 scope.go:117] "RemoveContainer" containerID="04fe15c1cd00c4072d285d953ee622494d0c7d2d2f1d7202f33d4e3526178996"
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.166240 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-tk9gz"
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.170111 5003 generic.go:334] "Generic (PLEG): container finished" podID="40dada5c-5a67-4362-b9fb-e49a7fc32307" containerID="b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2" exitCode=0
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.170369 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9"
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.170613 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" event={"ID":"40dada5c-5a67-4362-b9fb-e49a7fc32307","Type":"ContainerDied","Data":"b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2"}
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.170724 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9" event={"ID":"40dada5c-5a67-4362-b9fb-e49a7fc32307","Type":"ContainerDied","Data":"ea477a89e313ea3aac2911777d2d07de24287c464bc423950818313935825ea7"}
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.197536 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9"]
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.201469 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-nxmj9"]
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.206499 5003 scope.go:117] "RemoveContainer" containerID="b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2"
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.229962 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tk9gz"]
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.237686 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-tk9gz"]
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.238656 5003 scope.go:117] "RemoveContainer" containerID="b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2"
Jan 04 11:53:05 crc kubenswrapper[5003]: E0104 11:53:05.239297 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2\": container with ID starting with b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2 not found: ID does not exist" containerID="b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2"
Jan 04 11:53:05 crc kubenswrapper[5003]: I0104 11:53:05.239347 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2"} err="failed to get container status \"b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2\": rpc error: code = NotFound desc = could not find container \"b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2\": container with ID starting with b22d31f5b66fff9450cc88d09772d17c5a42ff042041cca32e7f7b2b7b98d4a2 not found: ID does not exist"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.195444 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"]
Jan 04 11:53:06 crc kubenswrapper[5003]: E0104 11:53:06.195933 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8006abe7-31d8-489d-9005-d96d40bb9ba5" containerName="controller-manager"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.195958 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8006abe7-31d8-489d-9005-d96d40bb9ba5" containerName="controller-manager"
Jan 04 11:53:06 crc kubenswrapper[5003]: E0104 11:53:06.195984 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40dada5c-5a67-4362-b9fb-e49a7fc32307" containerName="route-controller-manager"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.195998 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="40dada5c-5a67-4362-b9fb-e49a7fc32307" containerName="route-controller-manager"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.196270 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8006abe7-31d8-489d-9005-d96d40bb9ba5" containerName="controller-manager"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.196309 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="40dada5c-5a67-4362-b9fb-e49a7fc32307" containerName="route-controller-manager"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.197183 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.199594 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.202088 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.203155 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.203432 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.203672 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.208695 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"]
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.208918 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.210564 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.215147 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.215185 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.215259 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.215280 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.215442 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.215551 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.217518 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.221557 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"]
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.227809 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-proxy-ca-bundles\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.227876 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87l77\" (UniqueName: \"kubernetes.io/projected/7a38c086-67dd-4196-99ce-7693b09037f4-kube-api-access-87l77\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.227953 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvbfl\" (UniqueName: \"kubernetes.io/projected/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-kube-api-access-cvbfl\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.227990 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a38c086-67dd-4196-99ce-7693b09037f4-serving-cert\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.228111 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-client-ca\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.228188 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a38c086-67dd-4196-99ce-7693b09037f4-config\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.228252 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-serving-cert\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.228294 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a38c086-67dd-4196-99ce-7693b09037f4-client-ca\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.228331 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-config\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.228551 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"]
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.329701 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-proxy-ca-bundles\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.329744 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87l77\" (UniqueName: \"kubernetes.io/projected/7a38c086-67dd-4196-99ce-7693b09037f4-kube-api-access-87l77\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.329783 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvbfl\" (UniqueName: \"kubernetes.io/projected/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-kube-api-access-cvbfl\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.329805 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a38c086-67dd-4196-99ce-7693b09037f4-serving-cert\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.329830 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-client-ca\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.330235 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a38c086-67dd-4196-99ce-7693b09037f4-config\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.330266 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-serving-cert\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.330288 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a38c086-67dd-4196-99ce-7693b09037f4-client-ca\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.330313 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-config\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.331040 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-client-ca\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.331559 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-config\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.331695 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-proxy-ca-bundles\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.332125 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a38c086-67dd-4196-99ce-7693b09037f4-client-ca\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.332267 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a38c086-67dd-4196-99ce-7693b09037f4-config\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.338239 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a38c086-67dd-4196-99ce-7693b09037f4-serving-cert\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.343132 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-serving-cert\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.357271 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvbfl\" (UniqueName: \"kubernetes.io/projected/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-kube-api-access-cvbfl\") pod \"controller-manager-598cb7b6db-zhfcw\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.361835 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87l77\" (UniqueName: \"kubernetes.io/projected/7a38c086-67dd-4196-99ce-7693b09037f4-kube-api-access-87l77\") pod \"route-controller-manager-8cb8b5bdf-s4ch9\" (UID: \"7a38c086-67dd-4196-99ce-7693b09037f4\") " pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.544975 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.551174 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.821165 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40dada5c-5a67-4362-b9fb-e49a7fc32307" path="/var/lib/kubelet/pods/40dada5c-5a67-4362-b9fb-e49a7fc32307/volumes"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.822413 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8006abe7-31d8-489d-9005-d96d40bb9ba5" path="/var/lib/kubelet/pods/8006abe7-31d8-489d-9005-d96d40bb9ba5/volumes"
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.832527 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"]
Jan 04 11:53:06 crc kubenswrapper[5003]: I0104 11:53:06.865777 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"]
Jan 04 11:53:06 crc kubenswrapper[5003]: W0104 11:53:06.868127 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a38c086_67dd_4196_99ce_7693b09037f4.slice/crio-883f0cd0b211a263063faa604bd7675a015a074be68da73e64baa52edd8872eb WatchSource:0}: Error finding container 883f0cd0b211a263063faa604bd7675a015a074be68da73e64baa52edd8872eb: Status 404 returned error can't find the container with id 883f0cd0b211a263063faa604bd7675a015a074be68da73e64baa52edd8872eb
Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.190388 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mxr5x"]
Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.190704 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mxr5x" podUID="b4141980-5ab0-4976-81aa-80a2245ae245" containerName="registry-server" containerID="cri-o://685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e" gracePeriod=30
Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.200711 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9" event={"ID":"7a38c086-67dd-4196-99ce-7693b09037f4","Type":"ContainerStarted","Data":"5d7b3930104e67191b306f5d3fc3854395d5fe97d86cc316c2e5300a13ac4fde"}
Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.200765 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9" event={"ID":"7a38c086-67dd-4196-99ce-7693b09037f4","Type":"ContainerStarted","Data":"883f0cd0b211a263063faa604bd7675a015a074be68da73e64baa52edd8872eb"}
Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.203248 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9"
Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.203272 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gjbcx"]
Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.203423 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gjbcx" podUID="6c5933bf-668f-4062-998b-3b2c5ad3a811" containerName="registry-server" containerID="cri-o://1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739" gracePeriod=30
Jan 04
11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.211801 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-m6xcl"] Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.211992 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" podUID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" containerName="marketplace-operator" containerID="cri-o://5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f" gracePeriod=30 Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.216781 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw" event={"ID":"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6","Type":"ContainerStarted","Data":"2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032"} Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.216825 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw" event={"ID":"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6","Type":"ContainerStarted","Data":"69118f59cfbcdbb426c89c69b1b3e9e97d25369b2df2e907f8711f786fdbcdfa"} Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.218094 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.226998 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ghcn"] Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.227241 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7ghcn" podUID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" containerName="registry-server" containerID="cri-o://ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8" gracePeriod=30 Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.233577 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9t6r8"] Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.233800 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9t6r8" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" containerName="registry-server" containerID="cri-o://240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3" gracePeriod=30 Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.245196 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.246670 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9" podStartSLOduration=3.246653258 podStartE2EDuration="3.246653258s" podCreationTimestamp="2026-01-04 11:53:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:53:07.243641746 +0000 UTC m=+302.716671597" watchObservedRunningTime="2026-01-04 11:53:07.246653258 +0000 UTC m=+302.719683099" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.247679 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7z829"] Jan 04 11:53:07 crc 
kubenswrapper[5003]: I0104 11:53:07.248293 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.272641 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7z829"] Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.325464 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-8cb8b5bdf-s4ch9" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.328653 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw" podStartSLOduration=3.328640516 podStartE2EDuration="3.328640516s" podCreationTimestamp="2026-01-04 11:53:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:53:07.283865554 +0000 UTC m=+302.756895405" watchObservedRunningTime="2026-01-04 11:53:07.328640516 +0000 UTC m=+302.801670347" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.455618 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bb982b9b-26c8-4b54-b519-5d1c78c38ada-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7z829\" (UID: \"bb982b9b-26c8-4b54-b519-5d1c78c38ada\") " pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.455669 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5lcj\" (UniqueName: \"kubernetes.io/projected/bb982b9b-26c8-4b54-b519-5d1c78c38ada-kube-api-access-p5lcj\") pod \"marketplace-operator-79b997595-7z829\" (UID: \"bb982b9b-26c8-4b54-b519-5d1c78c38ada\") " pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.455707 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb982b9b-26c8-4b54-b519-5d1c78c38ada-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7z829\" (UID: \"bb982b9b-26c8-4b54-b519-5d1c78c38ada\") " pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.557412 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bb982b9b-26c8-4b54-b519-5d1c78c38ada-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7z829\" (UID: \"bb982b9b-26c8-4b54-b519-5d1c78c38ada\") " pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.557470 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5lcj\" (UniqueName: \"kubernetes.io/projected/bb982b9b-26c8-4b54-b519-5d1c78c38ada-kube-api-access-p5lcj\") pod \"marketplace-operator-79b997595-7z829\" (UID: \"bb982b9b-26c8-4b54-b519-5d1c78c38ada\") " pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.557512 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb982b9b-26c8-4b54-b519-5d1c78c38ada-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7z829\" (UID: \"bb982b9b-26c8-4b54-b519-5d1c78c38ada\") " pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.559884 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb982b9b-26c8-4b54-b519-5d1c78c38ada-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7z829\" (UID: \"bb982b9b-26c8-4b54-b519-5d1c78c38ada\") " pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.580393 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bb982b9b-26c8-4b54-b519-5d1c78c38ada-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7z829\" (UID: \"bb982b9b-26c8-4b54-b519-5d1c78c38ada\") " pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.580435 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5lcj\" (UniqueName: \"kubernetes.io/projected/bb982b9b-26c8-4b54-b519-5d1c78c38ada-kube-api-access-p5lcj\") pod \"marketplace-operator-79b997595-7z829\" (UID: \"bb982b9b-26c8-4b54-b519-5d1c78c38ada\") " pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.695915 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.745431 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.757339 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.761064 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-operator-metrics\") pod \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.761129 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-utilities\") pod \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.761199 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-trusted-ca\") pod \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.761269 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-catalog-content\") pod \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.761331 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-catalog-content\") pod \"b4141980-5ab0-4976-81aa-80a2245ae245\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.761354 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbgtq\" (UniqueName: \"kubernetes.io/projected/cf4075fd-a261-4d15-b6e3-02d8c346fe74-kube-api-access-xbgtq\") pod \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\" (UID: \"cf4075fd-a261-4d15-b6e3-02d8c346fe74\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.761399 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-utilities\") pod \"b4141980-5ab0-4976-81aa-80a2245ae245\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.761419 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhn9f\" (UniqueName: \"kubernetes.io/projected/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-kube-api-access-qhn9f\") pod \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\" (UID: \"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.761434 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htccq\" (UniqueName: \"kubernetes.io/projected/b4141980-5ab0-4976-81aa-80a2245ae245-kube-api-access-htccq\") pod \"b4141980-5ab0-4976-81aa-80a2245ae245\" (UID: \"b4141980-5ab0-4976-81aa-80a2245ae245\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.762250 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" (UID: "fad64100-7cb6-4457-9b74-ccc0cdf1dbb1"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.763033 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-utilities" (OuterVolumeSpecName: "utilities") pod "cf4075fd-a261-4d15-b6e3-02d8c346fe74" (UID: "cf4075fd-a261-4d15-b6e3-02d8c346fe74"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.763779 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-utilities" (OuterVolumeSpecName: "utilities") pod "b4141980-5ab0-4976-81aa-80a2245ae245" (UID: "b4141980-5ab0-4976-81aa-80a2245ae245"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.768876 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4141980-5ab0-4976-81aa-80a2245ae245-kube-api-access-htccq" (OuterVolumeSpecName: "kube-api-access-htccq") pod "b4141980-5ab0-4976-81aa-80a2245ae245" (UID: "b4141980-5ab0-4976-81aa-80a2245ae245"). InnerVolumeSpecName "kube-api-access-htccq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.770378 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.771199 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" (UID: "fad64100-7cb6-4457-9b74-ccc0cdf1dbb1"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.772630 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-kube-api-access-qhn9f" (OuterVolumeSpecName: "kube-api-access-qhn9f") pod "fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" (UID: "fad64100-7cb6-4457-9b74-ccc0cdf1dbb1"). InnerVolumeSpecName "kube-api-access-qhn9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.773355 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf4075fd-a261-4d15-b6e3-02d8c346fe74-kube-api-access-xbgtq" (OuterVolumeSpecName: "kube-api-access-xbgtq") pod "cf4075fd-a261-4d15-b6e3-02d8c346fe74" (UID: "cf4075fd-a261-4d15-b6e3-02d8c346fe74"). InnerVolumeSpecName "kube-api-access-xbgtq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.794253 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.802370 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf4075fd-a261-4d15-b6e3-02d8c346fe74" (UID: "cf4075fd-a261-4d15-b6e3-02d8c346fe74"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.832830 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4141980-5ab0-4976-81aa-80a2245ae245" (UID: "b4141980-5ab0-4976-81aa-80a2245ae245"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862206 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-utilities\") pod \"6c5933bf-668f-4062-998b-3b2c5ad3a811\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862262 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-utilities\") pod \"527e50de-5345-49f4-9ce8-3aaf8d446bed\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862305 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d975\" (UniqueName: \"kubernetes.io/projected/6c5933bf-668f-4062-998b-3b2c5ad3a811-kube-api-access-4d975\") pod \"6c5933bf-668f-4062-998b-3b2c5ad3a811\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862356 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-catalog-content\") pod \"6c5933bf-668f-4062-998b-3b2c5ad3a811\" (UID: \"6c5933bf-668f-4062-998b-3b2c5ad3a811\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862376 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-catalog-content\") pod \"527e50de-5345-49f4-9ce8-3aaf8d446bed\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862400 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kjrn\" (UniqueName: \"kubernetes.io/projected/527e50de-5345-49f4-9ce8-3aaf8d446bed-kube-api-access-6kjrn\") pod \"527e50de-5345-49f4-9ce8-3aaf8d446bed\" (UID: \"527e50de-5345-49f4-9ce8-3aaf8d446bed\") " Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862579 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbgtq\" (UniqueName: \"kubernetes.io/projected/cf4075fd-a261-4d15-b6e3-02d8c346fe74-kube-api-access-xbgtq\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862595 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862607 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhn9f\" (UniqueName: \"kubernetes.io/projected/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-kube-api-access-qhn9f\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862618 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htccq\" (UniqueName: \"kubernetes.io/projected/b4141980-5ab0-4976-81aa-80a2245ae245-kube-api-access-htccq\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862630 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862642 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862652 5003 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862662 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf4075fd-a261-4d15-b6e3-02d8c346fe74-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.862674 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4141980-5ab0-4976-81aa-80a2245ae245-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.863667 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-utilities" (OuterVolumeSpecName: "utilities") pod "527e50de-5345-49f4-9ce8-3aaf8d446bed" (UID: "527e50de-5345-49f4-9ce8-3aaf8d446bed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.863692 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-utilities" (OuterVolumeSpecName: "utilities") pod "6c5933bf-668f-4062-998b-3b2c5ad3a811" (UID: "6c5933bf-668f-4062-998b-3b2c5ad3a811"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.866347 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c5933bf-668f-4062-998b-3b2c5ad3a811-kube-api-access-4d975" (OuterVolumeSpecName: "kube-api-access-4d975") pod "6c5933bf-668f-4062-998b-3b2c5ad3a811" (UID: "6c5933bf-668f-4062-998b-3b2c5ad3a811"). InnerVolumeSpecName "kube-api-access-4d975". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.866513 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/527e50de-5345-49f4-9ce8-3aaf8d446bed-kube-api-access-6kjrn" (OuterVolumeSpecName: "kube-api-access-6kjrn") pod "527e50de-5345-49f4-9ce8-3aaf8d446bed" (UID: "527e50de-5345-49f4-9ce8-3aaf8d446bed"). InnerVolumeSpecName "kube-api-access-6kjrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.873830 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.921514 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c5933bf-668f-4062-998b-3b2c5ad3a811" (UID: "6c5933bf-668f-4062-998b-3b2c5ad3a811"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.963065 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.963106 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.963123 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d975\" (UniqueName: \"kubernetes.io/projected/6c5933bf-668f-4062-998b-3b2c5ad3a811-kube-api-access-4d975\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.963132 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5933bf-668f-4062-998b-3b2c5ad3a811-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:07 crc kubenswrapper[5003]: I0104 11:53:07.963141 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kjrn\" (UniqueName: \"kubernetes.io/projected/527e50de-5345-49f4-9ce8-3aaf8d446bed-kube-api-access-6kjrn\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.018786 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "527e50de-5345-49f4-9ce8-3aaf8d446bed" (UID: "527e50de-5345-49f4-9ce8-3aaf8d446bed"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.064233 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/527e50de-5345-49f4-9ce8-3aaf8d446bed-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.083819 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7z829"] Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.236000 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7z829" event={"ID":"bb982b9b-26c8-4b54-b519-5d1c78c38ada","Type":"ContainerStarted","Data":"1c48f0346909b28b0ced17732e9ca32850696baed2ea6c8cf792f220f91c2598"} Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.236523 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice/crio-ea477a89e313ea3aac2911777d2d07de24287c464bc423950818313935825ea7\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice\": RecentStats: unable to find data in memory cache]" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.238456 5003 generic.go:334] "Generic (PLEG): container finished" podID="6c5933bf-668f-4062-998b-3b2c5ad3a811" containerID="1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739" exitCode=0 Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.238560 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjbcx" event={"ID":"6c5933bf-668f-4062-998b-3b2c5ad3a811","Type":"ContainerDied","Data":"1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739"} Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.238638 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gjbcx" event={"ID":"6c5933bf-668f-4062-998b-3b2c5ad3a811","Type":"ContainerDied","Data":"a7e21f8e1266e0bd14e8e195ea5a9cc5166348407feaa6d60aaf96bdb65b37ad"} Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.238694 5003 scope.go:117] "RemoveContainer" containerID="1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.238994 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gjbcx" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.242728 5003 generic.go:334] "Generic (PLEG): container finished" podID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" containerID="5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f" exitCode=0 Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.242815 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" event={"ID":"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1","Type":"ContainerDied","Data":"5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f"} Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.243293 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" event={"ID":"fad64100-7cb6-4457-9b74-ccc0cdf1dbb1","Type":"ContainerDied","Data":"7ee4f9577180ff79da5ab2a59244787d1b2071c759c15e9e7518f9a606e3afed"} Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.242884 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-m6xcl" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.247907 5003 generic.go:334] "Generic (PLEG): container finished" podID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" containerID="ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8" exitCode=0 Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.248118 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ghcn" event={"ID":"cf4075fd-a261-4d15-b6e3-02d8c346fe74","Type":"ContainerDied","Data":"ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8"} Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.248231 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7ghcn" event={"ID":"cf4075fd-a261-4d15-b6e3-02d8c346fe74","Type":"ContainerDied","Data":"4ae870a1d1a128e5e6790914ad76aa86367e90369f3a4898901411bab9da3a34"} Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.248423 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7ghcn" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.252028 5003 generic.go:334] "Generic (PLEG): container finished" podID="b4141980-5ab0-4976-81aa-80a2245ae245" containerID="685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e" exitCode=0 Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.252094 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxr5x" event={"ID":"b4141980-5ab0-4976-81aa-80a2245ae245","Type":"ContainerDied","Data":"685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e"} Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.252120 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxr5x" event={"ID":"b4141980-5ab0-4976-81aa-80a2245ae245","Type":"ContainerDied","Data":"fa6edab9782fc6034e67a540a2aacc5f26263ac614cc4a28eb01ad30a6c0ff85"} Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.252195 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mxr5x" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.254743 5003 scope.go:117] "RemoveContainer" containerID="39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.255654 5003 generic.go:334] "Generic (PLEG): container finished" podID="527e50de-5345-49f4-9ce8-3aaf8d446bed" containerID="240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3" exitCode=0 Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.256202 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9t6r8" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.256495 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t6r8" event={"ID":"527e50de-5345-49f4-9ce8-3aaf8d446bed","Type":"ContainerDied","Data":"240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3"} Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.256765 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t6r8" event={"ID":"527e50de-5345-49f4-9ce8-3aaf8d446bed","Type":"ContainerDied","Data":"23b86fb9f2d3d9f5dae54cce4db63cef614b0934a302861c64d45303ba9aea1a"} Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.287053 5003 scope.go:117] "RemoveContainer" containerID="6f9a684eb697ec549e409cdf6475b17429be57144e4bb92eec576de2a2558240" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.308091 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ghcn"] Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.315985 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7ghcn"] Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.318994 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mxr5x"] Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.320352 5003 scope.go:117] "RemoveContainer" containerID="1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.320824 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739\": container with ID starting with 1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739 not found: ID does not exist" containerID="1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.320874 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739"} err="failed to get container status \"1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739\": rpc error: code = NotFound desc = could not find container \"1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739\": container with ID starting with 1a3bb1c5dec92ba0fd4ff3674a5b9dc46cb3899f051fd6517cc26326fd3d6739 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.320914 5003 scope.go:117] "RemoveContainer" containerID="39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.322349 5003 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1\": container with ID starting with 39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1 not found: ID does not exist" containerID="39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.322406 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1"} err="failed to get container status \"39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1\": rpc error: code = NotFound desc = could not find container \"39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1\": container with ID starting with 39eb7bc8a97905a00d340f01c044347d2affb6c1390327ca0afdb32c539005d1 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.322441 5003 scope.go:117] "RemoveContainer" containerID="6f9a684eb697ec549e409cdf6475b17429be57144e4bb92eec576de2a2558240" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.322954 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mxr5x"] Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.323301 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f9a684eb697ec549e409cdf6475b17429be57144e4bb92eec576de2a2558240\": container with ID starting with 6f9a684eb697ec549e409cdf6475b17429be57144e4bb92eec576de2a2558240 not found: ID does not exist" containerID="6f9a684eb697ec549e409cdf6475b17429be57144e4bb92eec576de2a2558240" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.324468 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f9a684eb697ec549e409cdf6475b17429be57144e4bb92eec576de2a2558240"} err="failed to get container status \"6f9a684eb697ec549e409cdf6475b17429be57144e4bb92eec576de2a2558240\": rpc error: code = NotFound desc = could not find container \"6f9a684eb697ec549e409cdf6475b17429be57144e4bb92eec576de2a2558240\": container with ID starting with 6f9a684eb697ec549e409cdf6475b17429be57144e4bb92eec576de2a2558240 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.324493 5003 scope.go:117] "RemoveContainer" containerID="5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.327691 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gjbcx"] Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.331874 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gjbcx"] Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.339397 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9t6r8"] Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.342368 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9t6r8"] Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.346140 5003 scope.go:117] "RemoveContainer" containerID="eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.354112 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-m6xcl"] Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.365367 5003 scope.go:117] "RemoveContainer" containerID="5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.366388 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f\": container with ID starting with 5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f not found: ID does not exist" containerID="5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.366422 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f"} err="failed to get container status \"5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f\": rpc error: code = NotFound desc = could not find container \"5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f\": container with ID starting with 5c3c6798e59f9cfbd2e2e376c092ec2d030730d2bb7ec1e822804e8244c1648f not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.366447 5003 scope.go:117] "RemoveContainer" containerID="eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.367030 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-m6xcl"] Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.367212 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258\": container with ID starting with eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258 not found: ID does not exist" containerID="eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.367255 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258"} err="failed to get container status \"eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258\": rpc error: code = NotFound desc = could not find container \"eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258\": container with ID starting with eacbcaa6129ad886bd2c0d4a6ad42d438a67109e876818fe359a25ea80bff258 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.367292 5003 scope.go:117] "RemoveContainer" containerID="ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.406651 5003 scope.go:117] "RemoveContainer" containerID="257c82e4a8eb9d14ae6b3c3f1d6c8aeee276988b29757efe5bc524267cf47892" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.436361 5003 scope.go:117] "RemoveContainer" containerID="14a3ba3e10485d554f246879628bbabccad7fd997873ad7b062cd3bcebf49f79" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.462791 5003 scope.go:117] "RemoveContainer" containerID="ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.463979 5003 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8\": container with ID starting with ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8 not found: ID does not exist" containerID="ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.464043 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8"} err="failed to get container status \"ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8\": rpc error: code = NotFound desc = could not find container \"ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8\": container with ID starting with ce61c8dd89c7bfc58945322ffd47c42c172382856c04a0f02db4a657ae384fc8 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.464071 5003 scope.go:117] "RemoveContainer" containerID="257c82e4a8eb9d14ae6b3c3f1d6c8aeee276988b29757efe5bc524267cf47892" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.464702 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"257c82e4a8eb9d14ae6b3c3f1d6c8aeee276988b29757efe5bc524267cf47892\": container with ID starting with 257c82e4a8eb9d14ae6b3c3f1d6c8aeee276988b29757efe5bc524267cf47892 not found: ID does not exist" containerID="257c82e4a8eb9d14ae6b3c3f1d6c8aeee276988b29757efe5bc524267cf47892" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.464754 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"257c82e4a8eb9d14ae6b3c3f1d6c8aeee276988b29757efe5bc524267cf47892"} err="failed to get container status \"257c82e4a8eb9d14ae6b3c3f1d6c8aeee276988b29757efe5bc524267cf47892\": rpc error: code = NotFound desc = could not find container \"257c82e4a8eb9d14ae6b3c3f1d6c8aeee276988b29757efe5bc524267cf47892\": container with ID starting with 257c82e4a8eb9d14ae6b3c3f1d6c8aeee276988b29757efe5bc524267cf47892 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.464789 5003 scope.go:117] "RemoveContainer" containerID="14a3ba3e10485d554f246879628bbabccad7fd997873ad7b062cd3bcebf49f79" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.465549 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14a3ba3e10485d554f246879628bbabccad7fd997873ad7b062cd3bcebf49f79\": container with ID starting with 14a3ba3e10485d554f246879628bbabccad7fd997873ad7b062cd3bcebf49f79 not found: ID does not exist" containerID="14a3ba3e10485d554f246879628bbabccad7fd997873ad7b062cd3bcebf49f79" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.465581 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14a3ba3e10485d554f246879628bbabccad7fd997873ad7b062cd3bcebf49f79"} err="failed to get container status \"14a3ba3e10485d554f246879628bbabccad7fd997873ad7b062cd3bcebf49f79\": rpc error: code = NotFound desc = could not find container \"14a3ba3e10485d554f246879628bbabccad7fd997873ad7b062cd3bcebf49f79\": container with ID starting with 14a3ba3e10485d554f246879628bbabccad7fd997873ad7b062cd3bcebf49f79 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.465640 5003 scope.go:117] "RemoveContainer" 
containerID="685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.480443 5003 scope.go:117] "RemoveContainer" containerID="f720f9f7d17b7226d0a9dfa47cb6f463f46360a512ccf72b364517ddd81ad339" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.499108 5003 scope.go:117] "RemoveContainer" containerID="f5048e0ad3480bc9ae224ba2856a499ea7b1d444ef5354f53f40c842192c1030" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.513403 5003 scope.go:117] "RemoveContainer" containerID="685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.513958 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e\": container with ID starting with 685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e not found: ID does not exist" containerID="685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.513985 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e"} err="failed to get container status \"685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e\": rpc error: code = NotFound desc = could not find container \"685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e\": container with ID starting with 685bb52bd3f838111357a4ab20126addcf1f043c7a5d2e27e11d64b56856426e not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.514008 5003 scope.go:117] "RemoveContainer" containerID="f720f9f7d17b7226d0a9dfa47cb6f463f46360a512ccf72b364517ddd81ad339" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.514382 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f720f9f7d17b7226d0a9dfa47cb6f463f46360a512ccf72b364517ddd81ad339\": container with ID starting with f720f9f7d17b7226d0a9dfa47cb6f463f46360a512ccf72b364517ddd81ad339 not found: ID does not exist" containerID="f720f9f7d17b7226d0a9dfa47cb6f463f46360a512ccf72b364517ddd81ad339" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.514424 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f720f9f7d17b7226d0a9dfa47cb6f463f46360a512ccf72b364517ddd81ad339"} err="failed to get container status \"f720f9f7d17b7226d0a9dfa47cb6f463f46360a512ccf72b364517ddd81ad339\": rpc error: code = NotFound desc = could not find container \"f720f9f7d17b7226d0a9dfa47cb6f463f46360a512ccf72b364517ddd81ad339\": container with ID starting with f720f9f7d17b7226d0a9dfa47cb6f463f46360a512ccf72b364517ddd81ad339 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.514455 5003 scope.go:117] "RemoveContainer" containerID="f5048e0ad3480bc9ae224ba2856a499ea7b1d444ef5354f53f40c842192c1030" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.514752 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5048e0ad3480bc9ae224ba2856a499ea7b1d444ef5354f53f40c842192c1030\": container with ID starting with f5048e0ad3480bc9ae224ba2856a499ea7b1d444ef5354f53f40c842192c1030 not found: ID does not exist" containerID="f5048e0ad3480bc9ae224ba2856a499ea7b1d444ef5354f53f40c842192c1030" 
Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.514778 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5048e0ad3480bc9ae224ba2856a499ea7b1d444ef5354f53f40c842192c1030"} err="failed to get container status \"f5048e0ad3480bc9ae224ba2856a499ea7b1d444ef5354f53f40c842192c1030\": rpc error: code = NotFound desc = could not find container \"f5048e0ad3480bc9ae224ba2856a499ea7b1d444ef5354f53f40c842192c1030\": container with ID starting with f5048e0ad3480bc9ae224ba2856a499ea7b1d444ef5354f53f40c842192c1030 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.514794 5003 scope.go:117] "RemoveContainer" containerID="240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.529522 5003 scope.go:117] "RemoveContainer" containerID="ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.547341 5003 scope.go:117] "RemoveContainer" containerID="343acf68591845aee2c7ff0467a9f0e37b6ccbad1d67095330035604ef2ae6e1" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.564115 5003 scope.go:117] "RemoveContainer" containerID="240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.564511 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3\": container with ID starting with 240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3 not found: ID does not exist" containerID="240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.564543 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3"} err="failed to get container status \"240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3\": rpc error: code = NotFound desc = could not find container \"240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3\": container with ID starting with 240f370141e056c8e328a585ad73edd8c28f502b0c8f9f23c424c18e1557ecd3 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.564571 5003 scope.go:117] "RemoveContainer" containerID="ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.564868 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481\": container with ID starting with ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481 not found: ID does not exist" containerID="ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.564933 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481"} err="failed to get container status \"ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481\": rpc error: code = NotFound desc = could not find container \"ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481\": container with ID starting with 
ecd56eb6896103108eb5f031b82468f6b1fb4715f21eafa4fb0429b43ca8b481 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.564975 5003 scope.go:117] "RemoveContainer" containerID="343acf68591845aee2c7ff0467a9f0e37b6ccbad1d67095330035604ef2ae6e1" Jan 04 11:53:08 crc kubenswrapper[5003]: E0104 11:53:08.565241 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"343acf68591845aee2c7ff0467a9f0e37b6ccbad1d67095330035604ef2ae6e1\": container with ID starting with 343acf68591845aee2c7ff0467a9f0e37b6ccbad1d67095330035604ef2ae6e1 not found: ID does not exist" containerID="343acf68591845aee2c7ff0467a9f0e37b6ccbad1d67095330035604ef2ae6e1" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.565262 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"343acf68591845aee2c7ff0467a9f0e37b6ccbad1d67095330035604ef2ae6e1"} err="failed to get container status \"343acf68591845aee2c7ff0467a9f0e37b6ccbad1d67095330035604ef2ae6e1\": rpc error: code = NotFound desc = could not find container \"343acf68591845aee2c7ff0467a9f0e37b6ccbad1d67095330035604ef2ae6e1\": container with ID starting with 343acf68591845aee2c7ff0467a9f0e37b6ccbad1d67095330035604ef2ae6e1 not found: ID does not exist" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.813795 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" path="/var/lib/kubelet/pods/527e50de-5345-49f4-9ce8-3aaf8d446bed/volumes" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.814972 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c5933bf-668f-4062-998b-3b2c5ad3a811" path="/var/lib/kubelet/pods/6c5933bf-668f-4062-998b-3b2c5ad3a811/volumes" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.815749 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4141980-5ab0-4976-81aa-80a2245ae245" path="/var/lib/kubelet/pods/b4141980-5ab0-4976-81aa-80a2245ae245/volumes" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.816906 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" path="/var/lib/kubelet/pods/cf4075fd-a261-4d15-b6e3-02d8c346fe74/volumes" Jan 04 11:53:08 crc kubenswrapper[5003]: I0104 11:53:08.817690 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" path="/var/lib/kubelet/pods/fad64100-7cb6-4457-9b74-ccc0cdf1dbb1/volumes" Jan 04 11:53:09 crc kubenswrapper[5003]: I0104 11:53:09.268883 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7z829" event={"ID":"bb982b9b-26c8-4b54-b519-5d1c78c38ada","Type":"ContainerStarted","Data":"b72759083d68f58921428fffa01bd44ecef2a11365e9c654028697e9e2385674"} Jan 04 11:53:09 crc kubenswrapper[5003]: I0104 11:53:09.271217 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:09 crc kubenswrapper[5003]: I0104 11:53:09.292006 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-7z829" Jan 04 11:53:09 crc kubenswrapper[5003]: I0104 11:53:09.324551 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-7z829" podStartSLOduration=2.324522249 
podStartE2EDuration="2.324522249s" podCreationTimestamp="2026-01-04 11:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:53:09.304159658 +0000 UTC m=+304.777189509" watchObservedRunningTime="2026-01-04 11:53:09.324522249 +0000 UTC m=+304.797552090" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.227441 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nbxfr"] Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228631 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4141980-5ab0-4976-81aa-80a2245ae245" containerName="extract-utilities" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228650 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4141980-5ab0-4976-81aa-80a2245ae245" containerName="extract-utilities" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228665 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" containerName="extract-utilities" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228672 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" containerName="extract-utilities" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228681 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" containerName="marketplace-operator" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228690 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" containerName="marketplace-operator" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228699 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" containerName="extract-content" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228706 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" containerName="extract-content" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228716 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" containerName="registry-server" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228724 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" containerName="registry-server" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228738 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" containerName="marketplace-operator" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228747 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" containerName="marketplace-operator" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228758 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4141980-5ab0-4976-81aa-80a2245ae245" containerName="registry-server" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228765 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4141980-5ab0-4976-81aa-80a2245ae245" containerName="registry-server" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228774 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5933bf-668f-4062-998b-3b2c5ad3a811" containerName="registry-server" Jan 04 11:53:16 
crc kubenswrapper[5003]: I0104 11:53:16.228780 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5933bf-668f-4062-998b-3b2c5ad3a811" containerName="registry-server" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228791 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5933bf-668f-4062-998b-3b2c5ad3a811" containerName="extract-utilities" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228799 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5933bf-668f-4062-998b-3b2c5ad3a811" containerName="extract-utilities" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228810 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" containerName="extract-utilities" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228818 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" containerName="extract-utilities" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228834 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" containerName="extract-content" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228843 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" containerName="extract-content" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228851 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" containerName="registry-server" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228859 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" containerName="registry-server" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228868 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4141980-5ab0-4976-81aa-80a2245ae245" containerName="extract-content" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228876 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4141980-5ab0-4976-81aa-80a2245ae245" containerName="extract-content" Jan 04 11:53:16 crc kubenswrapper[5003]: E0104 11:53:16.228888 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5933bf-668f-4062-998b-3b2c5ad3a811" containerName="extract-content" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.228896 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5933bf-668f-4062-998b-3b2c5ad3a811" containerName="extract-content" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.229056 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" containerName="marketplace-operator" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.229072 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4141980-5ab0-4976-81aa-80a2245ae245" containerName="registry-server" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.229088 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf4075fd-a261-4d15-b6e3-02d8c346fe74" containerName="registry-server" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.229102 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c5933bf-668f-4062-998b-3b2c5ad3a811" containerName="registry-server" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.229113 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="527e50de-5345-49f4-9ce8-3aaf8d446bed" 
containerName="registry-server" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.229346 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="fad64100-7cb6-4457-9b74-ccc0cdf1dbb1" containerName="marketplace-operator" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.230094 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.231901 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.239751 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbxfr"] Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.275040 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c99x\" (UniqueName: \"kubernetes.io/projected/4dd9a653-4387-44cf-82a3-e6a7ec6713bf-kube-api-access-7c99x\") pod \"redhat-marketplace-nbxfr\" (UID: \"4dd9a653-4387-44cf-82a3-e6a7ec6713bf\") " pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.275104 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dd9a653-4387-44cf-82a3-e6a7ec6713bf-catalog-content\") pod \"redhat-marketplace-nbxfr\" (UID: \"4dd9a653-4387-44cf-82a3-e6a7ec6713bf\") " pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.275489 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dd9a653-4387-44cf-82a3-e6a7ec6713bf-utilities\") pod \"redhat-marketplace-nbxfr\" (UID: \"4dd9a653-4387-44cf-82a3-e6a7ec6713bf\") " pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.376543 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dd9a653-4387-44cf-82a3-e6a7ec6713bf-catalog-content\") pod \"redhat-marketplace-nbxfr\" (UID: \"4dd9a653-4387-44cf-82a3-e6a7ec6713bf\") " pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.376653 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dd9a653-4387-44cf-82a3-e6a7ec6713bf-utilities\") pod \"redhat-marketplace-nbxfr\" (UID: \"4dd9a653-4387-44cf-82a3-e6a7ec6713bf\") " pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.376682 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c99x\" (UniqueName: \"kubernetes.io/projected/4dd9a653-4387-44cf-82a3-e6a7ec6713bf-kube-api-access-7c99x\") pod \"redhat-marketplace-nbxfr\" (UID: \"4dd9a653-4387-44cf-82a3-e6a7ec6713bf\") " pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.377072 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dd9a653-4387-44cf-82a3-e6a7ec6713bf-catalog-content\") pod \"redhat-marketplace-nbxfr\" (UID: \"4dd9a653-4387-44cf-82a3-e6a7ec6713bf\") 
" pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.377428 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dd9a653-4387-44cf-82a3-e6a7ec6713bf-utilities\") pod \"redhat-marketplace-nbxfr\" (UID: \"4dd9a653-4387-44cf-82a3-e6a7ec6713bf\") " pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.397162 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c99x\" (UniqueName: \"kubernetes.io/projected/4dd9a653-4387-44cf-82a3-e6a7ec6713bf-kube-api-access-7c99x\") pod \"redhat-marketplace-nbxfr\" (UID: \"4dd9a653-4387-44cf-82a3-e6a7ec6713bf\") " pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:16 crc kubenswrapper[5003]: I0104 11:53:16.555421 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:17 crc kubenswrapper[5003]: I0104 11:53:17.007179 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nbxfr"] Jan 04 11:53:17 crc kubenswrapper[5003]: W0104 11:53:17.016803 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4dd9a653_4387_44cf_82a3_e6a7ec6713bf.slice/crio-dbb2d3a222a28ac0a30dcb89f0e7dc7f09db115c360f7ecc1d25f107fa9f4488 WatchSource:0}: Error finding container dbb2d3a222a28ac0a30dcb89f0e7dc7f09db115c360f7ecc1d25f107fa9f4488: Status 404 returned error can't find the container with id dbb2d3a222a28ac0a30dcb89f0e7dc7f09db115c360f7ecc1d25f107fa9f4488 Jan 04 11:53:17 crc kubenswrapper[5003]: I0104 11:53:17.309754 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbxfr" event={"ID":"4dd9a653-4387-44cf-82a3-e6a7ec6713bf","Type":"ContainerStarted","Data":"dbb2d3a222a28ac0a30dcb89f0e7dc7f09db115c360f7ecc1d25f107fa9f4488"} Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.229225 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l2psm"] Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.230460 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.231857 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.236350 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l2psm"] Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.321731 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7p6r\" (UniqueName: \"kubernetes.io/projected/a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f-kube-api-access-c7p6r\") pod \"certified-operators-l2psm\" (UID: \"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f\") " pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.321861 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f-catalog-content\") pod \"certified-operators-l2psm\" (UID: \"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f\") " pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.321917 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f-utilities\") pod \"certified-operators-l2psm\" (UID: \"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f\") " pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:18 crc kubenswrapper[5003]: E0104 11:53:18.344821 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice/crio-ea477a89e313ea3aac2911777d2d07de24287c464bc423950818313935825ea7\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice\": RecentStats: unable to find data in memory cache]" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.423060 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f-utilities\") pod \"certified-operators-l2psm\" (UID: \"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f\") " pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.423107 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7p6r\" (UniqueName: \"kubernetes.io/projected/a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f-kube-api-access-c7p6r\") pod \"certified-operators-l2psm\" (UID: \"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f\") " pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.423162 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f-catalog-content\") pod \"certified-operators-l2psm\" (UID: \"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f\") " pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.423875 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f-utilities\") pod \"certified-operators-l2psm\" (UID: \"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f\") " pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.424992 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f-catalog-content\") pod \"certified-operators-l2psm\" (UID: \"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f\") " pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.443264 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7p6r\" (UniqueName: \"kubernetes.io/projected/a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f-kube-api-access-c7p6r\") pod \"certified-operators-l2psm\" (UID: \"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f\") " pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.621328 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.628657 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4v6d2"] Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.635042 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.636817 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4v6d2"] Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.643192 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.726408 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cv58\" (UniqueName: \"kubernetes.io/projected/b778690e-8cf2-43d0-ac8b-f992c11318a8-kube-api-access-5cv58\") pod \"community-operators-4v6d2\" (UID: \"b778690e-8cf2-43d0-ac8b-f992c11318a8\") " pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.726915 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b778690e-8cf2-43d0-ac8b-f992c11318a8-utilities\") pod \"community-operators-4v6d2\" (UID: \"b778690e-8cf2-43d0-ac8b-f992c11318a8\") " pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.726984 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b778690e-8cf2-43d0-ac8b-f992c11318a8-catalog-content\") pod \"community-operators-4v6d2\" (UID: \"b778690e-8cf2-43d0-ac8b-f992c11318a8\") " pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.828122 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cv58\" (UniqueName: \"kubernetes.io/projected/b778690e-8cf2-43d0-ac8b-f992c11318a8-kube-api-access-5cv58\") pod \"community-operators-4v6d2\" (UID: 
\"b778690e-8cf2-43d0-ac8b-f992c11318a8\") " pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.828196 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b778690e-8cf2-43d0-ac8b-f992c11318a8-utilities\") pod \"community-operators-4v6d2\" (UID: \"b778690e-8cf2-43d0-ac8b-f992c11318a8\") " pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.828237 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b778690e-8cf2-43d0-ac8b-f992c11318a8-catalog-content\") pod \"community-operators-4v6d2\" (UID: \"b778690e-8cf2-43d0-ac8b-f992c11318a8\") " pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.829060 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b778690e-8cf2-43d0-ac8b-f992c11318a8-utilities\") pod \"community-operators-4v6d2\" (UID: \"b778690e-8cf2-43d0-ac8b-f992c11318a8\") " pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.830067 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b778690e-8cf2-43d0-ac8b-f992c11318a8-catalog-content\") pod \"community-operators-4v6d2\" (UID: \"b778690e-8cf2-43d0-ac8b-f992c11318a8\") " pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.846510 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cv58\" (UniqueName: \"kubernetes.io/projected/b778690e-8cf2-43d0-ac8b-f992c11318a8-kube-api-access-5cv58\") pod \"community-operators-4v6d2\" (UID: \"b778690e-8cf2-43d0-ac8b-f992c11318a8\") " pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:18 crc kubenswrapper[5003]: I0104 11:53:18.984375 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:19 crc kubenswrapper[5003]: I0104 11:53:19.043133 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l2psm"] Jan 04 11:53:19 crc kubenswrapper[5003]: W0104 11:53:19.064188 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda82e6f0c_30d6_4379_926b_4ee8cbd7ff1f.slice/crio-57a36a5b91cf247ecb26df3c7bd7e08b54870e49416132dc5176aa780ea777e8 WatchSource:0}: Error finding container 57a36a5b91cf247ecb26df3c7bd7e08b54870e49416132dc5176aa780ea777e8: Status 404 returned error can't find the container with id 57a36a5b91cf247ecb26df3c7bd7e08b54870e49416132dc5176aa780ea777e8 Jan 04 11:53:19 crc kubenswrapper[5003]: I0104 11:53:19.336979 5003 generic.go:334] "Generic (PLEG): container finished" podID="a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f" containerID="4ad34319c293749d8e189dc02074066cb550c35eefa2be14375c21638ea73b77" exitCode=0 Jan 04 11:53:19 crc kubenswrapper[5003]: I0104 11:53:19.337093 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2psm" event={"ID":"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f","Type":"ContainerDied","Data":"4ad34319c293749d8e189dc02074066cb550c35eefa2be14375c21638ea73b77"} Jan 04 11:53:19 crc kubenswrapper[5003]: I0104 11:53:19.337119 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2psm" event={"ID":"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f","Type":"ContainerStarted","Data":"57a36a5b91cf247ecb26df3c7bd7e08b54870e49416132dc5176aa780ea777e8"} Jan 04 11:53:19 crc kubenswrapper[5003]: I0104 11:53:19.338644 5003 generic.go:334] "Generic (PLEG): container finished" podID="4dd9a653-4387-44cf-82a3-e6a7ec6713bf" containerID="cafc50989030b7ca883ee89fadfb05fda37f60e59dd0202a7ce4651b78f5f255" exitCode=0 Jan 04 11:53:19 crc kubenswrapper[5003]: I0104 11:53:19.338713 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbxfr" event={"ID":"4dd9a653-4387-44cf-82a3-e6a7ec6713bf","Type":"ContainerDied","Data":"cafc50989030b7ca883ee89fadfb05fda37f60e59dd0202a7ce4651b78f5f255"} Jan 04 11:53:19 crc kubenswrapper[5003]: I0104 11:53:19.428144 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4v6d2"] Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.232302 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4gnrl"] Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.234124 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.237369 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.249373 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4gnrl"] Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.345219 5003 generic.go:334] "Generic (PLEG): container finished" podID="b778690e-8cf2-43d0-ac8b-f992c11318a8" containerID="07b3d16be69c7ef8a5e5b3b533b3ad6cb41fd1c45d453d65707dadb8a2cf7467" exitCode=0 Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.345330 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4v6d2" event={"ID":"b778690e-8cf2-43d0-ac8b-f992c11318a8","Type":"ContainerDied","Data":"07b3d16be69c7ef8a5e5b3b533b3ad6cb41fd1c45d453d65707dadb8a2cf7467"} Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.345375 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4v6d2" event={"ID":"b778690e-8cf2-43d0-ac8b-f992c11318a8","Type":"ContainerStarted","Data":"6909ff5586aff3cd7178d60749f2867a58e7fbef7c03d83a4c10c511103751a4"} Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.346996 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96de3db8-350f-49d3-96ab-2fc8b9535665-utilities\") pod \"redhat-operators-4gnrl\" (UID: \"96de3db8-350f-49d3-96ab-2fc8b9535665\") " pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.347062 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtzst\" (UniqueName: \"kubernetes.io/projected/96de3db8-350f-49d3-96ab-2fc8b9535665-kube-api-access-xtzst\") pod \"redhat-operators-4gnrl\" (UID: \"96de3db8-350f-49d3-96ab-2fc8b9535665\") " pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.347099 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96de3db8-350f-49d3-96ab-2fc8b9535665-catalog-content\") pod \"redhat-operators-4gnrl\" (UID: \"96de3db8-350f-49d3-96ab-2fc8b9535665\") " pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.349043 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2psm" event={"ID":"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f","Type":"ContainerStarted","Data":"b01e0ca6a2a69ca727ee2d8e14ea92c7607bb5ca4bf9aeb73583b1c4c55b659a"} Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.354362 5003 generic.go:334] "Generic (PLEG): container finished" podID="4dd9a653-4387-44cf-82a3-e6a7ec6713bf" containerID="804ebfb6857a0b7c3207dacc21e33973d407b4433ee5384a9f0aeb8b8dba8cea" exitCode=0 Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.354397 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbxfr" event={"ID":"4dd9a653-4387-44cf-82a3-e6a7ec6713bf","Type":"ContainerDied","Data":"804ebfb6857a0b7c3207dacc21e33973d407b4433ee5384a9f0aeb8b8dba8cea"} Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.448521 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96de3db8-350f-49d3-96ab-2fc8b9535665-utilities\") pod \"redhat-operators-4gnrl\" (UID: \"96de3db8-350f-49d3-96ab-2fc8b9535665\") " pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.448599 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtzst\" (UniqueName: \"kubernetes.io/projected/96de3db8-350f-49d3-96ab-2fc8b9535665-kube-api-access-xtzst\") pod \"redhat-operators-4gnrl\" (UID: \"96de3db8-350f-49d3-96ab-2fc8b9535665\") " pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.448669 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96de3db8-350f-49d3-96ab-2fc8b9535665-catalog-content\") pod \"redhat-operators-4gnrl\" (UID: \"96de3db8-350f-49d3-96ab-2fc8b9535665\") " pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.449313 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96de3db8-350f-49d3-96ab-2fc8b9535665-catalog-content\") pod \"redhat-operators-4gnrl\" (UID: \"96de3db8-350f-49d3-96ab-2fc8b9535665\") " pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.451199 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96de3db8-350f-49d3-96ab-2fc8b9535665-utilities\") pod \"redhat-operators-4gnrl\" (UID: \"96de3db8-350f-49d3-96ab-2fc8b9535665\") " pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.468966 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtzst\" (UniqueName: \"kubernetes.io/projected/96de3db8-350f-49d3-96ab-2fc8b9535665-kube-api-access-xtzst\") pod \"redhat-operators-4gnrl\" (UID: \"96de3db8-350f-49d3-96ab-2fc8b9535665\") " pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.559821 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:20 crc kubenswrapper[5003]: I0104 11:53:20.969111 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4gnrl"] Jan 04 11:53:21 crc kubenswrapper[5003]: I0104 11:53:21.361781 5003 generic.go:334] "Generic (PLEG): container finished" podID="96de3db8-350f-49d3-96ab-2fc8b9535665" containerID="624c0f47e7ee124934a24f2306d31dcb718c1d60b3b06608dafe598f7481cd5c" exitCode=0 Jan 04 11:53:21 crc kubenswrapper[5003]: I0104 11:53:21.361870 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gnrl" event={"ID":"96de3db8-350f-49d3-96ab-2fc8b9535665","Type":"ContainerDied","Data":"624c0f47e7ee124934a24f2306d31dcb718c1d60b3b06608dafe598f7481cd5c"} Jan 04 11:53:21 crc kubenswrapper[5003]: I0104 11:53:21.361899 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gnrl" event={"ID":"96de3db8-350f-49d3-96ab-2fc8b9535665","Type":"ContainerStarted","Data":"06bee353b691f8ca7511e7f5a54d764fa050a3bfa747a9401d342ae10e20ef54"} Jan 04 11:53:21 crc kubenswrapper[5003]: I0104 11:53:21.364751 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nbxfr" event={"ID":"4dd9a653-4387-44cf-82a3-e6a7ec6713bf","Type":"ContainerStarted","Data":"aa284a7370c2eeff9edf6d66454fbd0bdbad39b6cc2028553886643035146380"} Jan 04 11:53:21 crc kubenswrapper[5003]: I0104 11:53:21.366519 5003 generic.go:334] "Generic (PLEG): container finished" podID="b778690e-8cf2-43d0-ac8b-f992c11318a8" containerID="4b926c329f9a424993f628115fb15bc93d2c1b329fc9f53b7ce9636acde618cd" exitCode=0 Jan 04 11:53:21 crc kubenswrapper[5003]: I0104 11:53:21.366557 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4v6d2" event={"ID":"b778690e-8cf2-43d0-ac8b-f992c11318a8","Type":"ContainerDied","Data":"4b926c329f9a424993f628115fb15bc93d2c1b329fc9f53b7ce9636acde618cd"} Jan 04 11:53:21 crc kubenswrapper[5003]: I0104 11:53:21.368408 5003 generic.go:334] "Generic (PLEG): container finished" podID="a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f" containerID="b01e0ca6a2a69ca727ee2d8e14ea92c7607bb5ca4bf9aeb73583b1c4c55b659a" exitCode=0 Jan 04 11:53:21 crc kubenswrapper[5003]: I0104 11:53:21.368445 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2psm" event={"ID":"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f","Type":"ContainerDied","Data":"b01e0ca6a2a69ca727ee2d8e14ea92c7607bb5ca4bf9aeb73583b1c4c55b659a"} Jan 04 11:53:21 crc kubenswrapper[5003]: I0104 11:53:21.404265 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nbxfr" podStartSLOduration=3.99036544 podStartE2EDuration="5.404247099s" podCreationTimestamp="2026-01-04 11:53:16 +0000 UTC" firstStartedPulling="2026-01-04 11:53:19.339831331 +0000 UTC m=+314.812861172" lastFinishedPulling="2026-01-04 11:53:20.75371299 +0000 UTC m=+316.226742831" observedRunningTime="2026-01-04 11:53:21.402172063 +0000 UTC m=+316.875201924" watchObservedRunningTime="2026-01-04 11:53:21.404247099 +0000 UTC m=+316.877276940" Jan 04 11:53:22 crc kubenswrapper[5003]: I0104 11:53:22.375174 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4v6d2" 
event={"ID":"b778690e-8cf2-43d0-ac8b-f992c11318a8","Type":"ContainerStarted","Data":"5d9a818095e1f6d976045141cbf60c2f934593145c9cbf06ef4b4628d5b5d2d1"} Jan 04 11:53:22 crc kubenswrapper[5003]: I0104 11:53:22.376975 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l2psm" event={"ID":"a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f","Type":"ContainerStarted","Data":"02c7a91ad7b96fba509f2c394cf54a45942f9eedd9289c021a1567cce829a829"} Jan 04 11:53:22 crc kubenswrapper[5003]: I0104 11:53:22.378399 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gnrl" event={"ID":"96de3db8-350f-49d3-96ab-2fc8b9535665","Type":"ContainerStarted","Data":"1d88a99f923641144a5076d151d18f38de9ac72353ff59031ef18d09f0891d29"} Jan 04 11:53:22 crc kubenswrapper[5003]: I0104 11:53:22.395361 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4v6d2" podStartSLOduration=2.975633603 podStartE2EDuration="4.395344041s" podCreationTimestamp="2026-01-04 11:53:18 +0000 UTC" firstStartedPulling="2026-01-04 11:53:20.34786044 +0000 UTC m=+315.820890281" lastFinishedPulling="2026-01-04 11:53:21.767570878 +0000 UTC m=+317.240600719" observedRunningTime="2026-01-04 11:53:22.392116633 +0000 UTC m=+317.865146494" watchObservedRunningTime="2026-01-04 11:53:22.395344041 +0000 UTC m=+317.868373902" Jan 04 11:53:22 crc kubenswrapper[5003]: I0104 11:53:22.419778 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l2psm" podStartSLOduration=1.971385366 podStartE2EDuration="4.419763531s" podCreationTimestamp="2026-01-04 11:53:18 +0000 UTC" firstStartedPulling="2026-01-04 11:53:19.3401484 +0000 UTC m=+314.813178241" lastFinishedPulling="2026-01-04 11:53:21.788526565 +0000 UTC m=+317.261556406" observedRunningTime="2026-01-04 11:53:22.417989013 +0000 UTC m=+317.891018854" watchObservedRunningTime="2026-01-04 11:53:22.419763531 +0000 UTC m=+317.892793372" Jan 04 11:53:23 crc kubenswrapper[5003]: I0104 11:53:23.385028 5003 generic.go:334] "Generic (PLEG): container finished" podID="96de3db8-350f-49d3-96ab-2fc8b9535665" containerID="1d88a99f923641144a5076d151d18f38de9ac72353ff59031ef18d09f0891d29" exitCode=0 Jan 04 11:53:23 crc kubenswrapper[5003]: I0104 11:53:23.385276 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gnrl" event={"ID":"96de3db8-350f-49d3-96ab-2fc8b9535665","Type":"ContainerDied","Data":"1d88a99f923641144a5076d151d18f38de9ac72353ff59031ef18d09f0891d29"} Jan 04 11:53:24 crc kubenswrapper[5003]: I0104 11:53:24.393098 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4gnrl" event={"ID":"96de3db8-350f-49d3-96ab-2fc8b9535665","Type":"ContainerStarted","Data":"6e065a878b31da2240c8eed296fd6831a520759361448293fd34c093c12c0d35"} Jan 04 11:53:24 crc kubenswrapper[5003]: I0104 11:53:24.413079 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4gnrl" podStartSLOduration=1.948723438 podStartE2EDuration="4.413061495s" podCreationTimestamp="2026-01-04 11:53:20 +0000 UTC" firstStartedPulling="2026-01-04 11:53:21.363381673 +0000 UTC m=+316.836411524" lastFinishedPulling="2026-01-04 11:53:23.82771974 +0000 UTC m=+319.300749581" observedRunningTime="2026-01-04 11:53:24.410517146 +0000 UTC m=+319.883546997" watchObservedRunningTime="2026-01-04 11:53:24.413061495 
+0000 UTC m=+319.886091346" Jan 04 11:53:26 crc kubenswrapper[5003]: I0104 11:53:26.556597 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:26 crc kubenswrapper[5003]: I0104 11:53:26.557058 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:26 crc kubenswrapper[5003]: I0104 11:53:26.621897 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:27 crc kubenswrapper[5003]: I0104 11:53:27.490151 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nbxfr" Jan 04 11:53:28 crc kubenswrapper[5003]: E0104 11:53:28.520248 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice/crio-ea477a89e313ea3aac2911777d2d07de24287c464bc423950818313935825ea7\": RecentStats: unable to find data in memory cache]" Jan 04 11:53:28 crc kubenswrapper[5003]: I0104 11:53:28.622119 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:28 crc kubenswrapper[5003]: I0104 11:53:28.622430 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:28 crc kubenswrapper[5003]: I0104 11:53:28.671906 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:28 crc kubenswrapper[5003]: I0104 11:53:28.985568 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:28 crc kubenswrapper[5003]: I0104 11:53:28.985620 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:29 crc kubenswrapper[5003]: I0104 11:53:29.023385 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:29 crc kubenswrapper[5003]: I0104 11:53:29.475788 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4v6d2" Jan 04 11:53:29 crc kubenswrapper[5003]: I0104 11:53:29.487649 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l2psm" Jan 04 11:53:30 crc kubenswrapper[5003]: I0104 11:53:30.560886 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:30 crc kubenswrapper[5003]: I0104 11:53:30.560980 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:30 crc kubenswrapper[5003]: I0104 11:53:30.607825 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:31 crc kubenswrapper[5003]: I0104 11:53:31.482654 5003 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4gnrl" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.268170 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-927fs"] Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.269611 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.283304 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-927fs"] Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.313846 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52ff60cd-49eb-43cf-af8b-8f4024df8471-installation-pull-secrets\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.313977 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvhlb\" (UniqueName: \"kubernetes.io/projected/52ff60cd-49eb-43cf-af8b-8f4024df8471-kube-api-access-rvhlb\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.314049 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52ff60cd-49eb-43cf-af8b-8f4024df8471-registry-tls\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.314088 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52ff60cd-49eb-43cf-af8b-8f4024df8471-registry-certificates\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.314145 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52ff60cd-49eb-43cf-af8b-8f4024df8471-ca-trust-extracted\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.314377 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52ff60cd-49eb-43cf-af8b-8f4024df8471-bound-sa-token\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.314429 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52ff60cd-49eb-43cf-af8b-8f4024df8471-trusted-ca\") pod 
\"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.314481 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.364356 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.415556 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvhlb\" (UniqueName: \"kubernetes.io/projected/52ff60cd-49eb-43cf-af8b-8f4024df8471-kube-api-access-rvhlb\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.415610 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52ff60cd-49eb-43cf-af8b-8f4024df8471-registry-tls\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.415633 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52ff60cd-49eb-43cf-af8b-8f4024df8471-registry-certificates\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.415659 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52ff60cd-49eb-43cf-af8b-8f4024df8471-ca-trust-extracted\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.415700 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52ff60cd-49eb-43cf-af8b-8f4024df8471-bound-sa-token\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.415716 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52ff60cd-49eb-43cf-af8b-8f4024df8471-trusted-ca\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc 
kubenswrapper[5003]: I0104 11:53:32.415742 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52ff60cd-49eb-43cf-af8b-8f4024df8471-installation-pull-secrets\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.417590 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/52ff60cd-49eb-43cf-af8b-8f4024df8471-trusted-ca\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.417876 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/52ff60cd-49eb-43cf-af8b-8f4024df8471-ca-trust-extracted\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.418976 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/52ff60cd-49eb-43cf-af8b-8f4024df8471-registry-certificates\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.424784 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/52ff60cd-49eb-43cf-af8b-8f4024df8471-installation-pull-secrets\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.434543 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/52ff60cd-49eb-43cf-af8b-8f4024df8471-registry-tls\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.438584 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvhlb\" (UniqueName: \"kubernetes.io/projected/52ff60cd-49eb-43cf-af8b-8f4024df8471-kube-api-access-rvhlb\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.447922 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/52ff60cd-49eb-43cf-af8b-8f4024df8471-bound-sa-token\") pod \"image-registry-66df7c8f76-927fs\" (UID: \"52ff60cd-49eb-43cf-af8b-8f4024df8471\") " pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:32 crc kubenswrapper[5003]: I0104 11:53:32.635544 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:33 crc kubenswrapper[5003]: I0104 11:53:33.117373 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-927fs"] Jan 04 11:53:33 crc kubenswrapper[5003]: W0104 11:53:33.123581 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52ff60cd_49eb_43cf_af8b_8f4024df8471.slice/crio-306dcb8f095364ca8647bc693b91ac6ddbcc0bbd8c172ea63d312627499a2e2e WatchSource:0}: Error finding container 306dcb8f095364ca8647bc693b91ac6ddbcc0bbd8c172ea63d312627499a2e2e: Status 404 returned error can't find the container with id 306dcb8f095364ca8647bc693b91ac6ddbcc0bbd8c172ea63d312627499a2e2e Jan 04 11:53:33 crc kubenswrapper[5003]: I0104 11:53:33.445497 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-927fs" event={"ID":"52ff60cd-49eb-43cf-af8b-8f4024df8471","Type":"ContainerStarted","Data":"306dcb8f095364ca8647bc693b91ac6ddbcc0bbd8c172ea63d312627499a2e2e"} Jan 04 11:53:34 crc kubenswrapper[5003]: I0104 11:53:34.450887 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-927fs" event={"ID":"52ff60cd-49eb-43cf-af8b-8f4024df8471","Type":"ContainerStarted","Data":"1b9bbc08cbf0a79317ced6a5cf59a76c46d25b011544a6cd4f6694741f183ae6"} Jan 04 11:53:34 crc kubenswrapper[5003]: I0104 11:53:34.451006 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:34 crc kubenswrapper[5003]: I0104 11:53:34.478406 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-927fs" podStartSLOduration=2.478386384 podStartE2EDuration="2.478386384s" podCreationTimestamp="2026-01-04 11:53:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:53:34.477943632 +0000 UTC m=+329.950973473" watchObservedRunningTime="2026-01-04 11:53:34.478386384 +0000 UTC m=+329.951416215" Jan 04 11:53:38 crc kubenswrapper[5003]: E0104 11:53:38.648712 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice/crio-ea477a89e313ea3aac2911777d2d07de24287c464bc423950818313935825ea7\": RecentStats: unable to find data in memory cache]" Jan 04 11:53:48 crc kubenswrapper[5003]: E0104 11:53:48.840312 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice/crio-ea477a89e313ea3aac2911777d2d07de24287c464bc423950818313935825ea7\": RecentStats: unable to find data in memory cache]" Jan 04 11:53:52 crc kubenswrapper[5003]: I0104 11:53:52.642671 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-image-registry/image-registry-66df7c8f76-927fs" Jan 04 11:53:52 crc kubenswrapper[5003]: I0104 11:53:52.739027 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d2kjq"] Jan 04 11:53:59 crc kubenswrapper[5003]: E0104 11:53:59.001729 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice/crio-ea477a89e313ea3aac2911777d2d07de24287c464bc423950818313935825ea7\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40dada5c_5a67_4362_b9fb_e49a7fc32307.slice\": RecentStats: unable to find data in memory cache]" Jan 04 11:54:09 crc kubenswrapper[5003]: I0104 11:54:09.419258 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 11:54:09 crc kubenswrapper[5003]: I0104 11:54:09.419918 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:54:17 crc kubenswrapper[5003]: I0104 11:54:17.775168 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" podUID="edc8a447-cc41-4241-be6a-957fa4255108" containerName="registry" containerID="cri-o://006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce" gracePeriod=30 Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.268302 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.409541 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-registry-certificates\") pod \"edc8a447-cc41-4241-be6a-957fa4255108\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.409686 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/edc8a447-cc41-4241-be6a-957fa4255108-installation-pull-secrets\") pod \"edc8a447-cc41-4241-be6a-957fa4255108\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.409916 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"edc8a447-cc41-4241-be6a-957fa4255108\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.409977 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-trusted-ca\") pod \"edc8a447-cc41-4241-be6a-957fa4255108\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.410070 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/edc8a447-cc41-4241-be6a-957fa4255108-ca-trust-extracted\") pod \"edc8a447-cc41-4241-be6a-957fa4255108\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.410109 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-registry-tls\") pod \"edc8a447-cc41-4241-be6a-957fa4255108\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.410144 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-bound-sa-token\") pod \"edc8a447-cc41-4241-be6a-957fa4255108\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.410182 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4tnb\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-kube-api-access-z4tnb\") pod \"edc8a447-cc41-4241-be6a-957fa4255108\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.410826 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "edc8a447-cc41-4241-be6a-957fa4255108" (UID: "edc8a447-cc41-4241-be6a-957fa4255108"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.410967 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "edc8a447-cc41-4241-be6a-957fa4255108" (UID: "edc8a447-cc41-4241-be6a-957fa4255108"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.416560 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-kube-api-access-z4tnb" (OuterVolumeSpecName: "kube-api-access-z4tnb") pod "edc8a447-cc41-4241-be6a-957fa4255108" (UID: "edc8a447-cc41-4241-be6a-957fa4255108"). InnerVolumeSpecName "kube-api-access-z4tnb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.418715 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "edc8a447-cc41-4241-be6a-957fa4255108" (UID: "edc8a447-cc41-4241-be6a-957fa4255108"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.419730 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/edc8a447-cc41-4241-be6a-957fa4255108-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "edc8a447-cc41-4241-be6a-957fa4255108" (UID: "edc8a447-cc41-4241-be6a-957fa4255108"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:54:18 crc kubenswrapper[5003]: E0104 11:54:18.419866 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:edc8a447-cc41-4241-be6a-957fa4255108 nodeName:}" failed. No retries permitted until 2026-01-04 11:54:18.919833875 +0000 UTC m=+374.392863886 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "registry-storage" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "edc8a447-cc41-4241-be6a-957fa4255108" (UID: "edc8a447-cc41-4241-be6a-957fa4255108") : kubernetes.io/csi: Unmounter.TearDownAt failed: rpc error: code = Unknown desc = check target path: could not get consistent content of /proc/mounts after 3 attempts Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.419879 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "edc8a447-cc41-4241-be6a-957fa4255108" (UID: "edc8a447-cc41-4241-be6a-957fa4255108"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.440525 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edc8a447-cc41-4241-be6a-957fa4255108-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "edc8a447-cc41-4241-be6a-957fa4255108" (UID: "edc8a447-cc41-4241-be6a-957fa4255108"). 
InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.511772 5003 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.511805 5003 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/edc8a447-cc41-4241-be6a-957fa4255108-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.511814 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/edc8a447-cc41-4241-be6a-957fa4255108-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.511825 5003 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/edc8a447-cc41-4241-be6a-957fa4255108-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.511833 5003 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.511841 5003 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.511849 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4tnb\" (UniqueName: \"kubernetes.io/projected/edc8a447-cc41-4241-be6a-957fa4255108-kube-api-access-z4tnb\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.732920 5003 generic.go:334] "Generic (PLEG): container finished" podID="edc8a447-cc41-4241-be6a-957fa4255108" containerID="006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce" exitCode=0 Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.732971 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" event={"ID":"edc8a447-cc41-4241-be6a-957fa4255108","Type":"ContainerDied","Data":"006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce"} Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.732981 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.733002 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-d2kjq" event={"ID":"edc8a447-cc41-4241-be6a-957fa4255108","Type":"ContainerDied","Data":"9a835883d2863c3efe8cba0e6ea21d17af5c8eec58e6c0d4bcffd669d93ec8ff"} Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.733060 5003 scope.go:117] "RemoveContainer" containerID="006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.758850 5003 scope.go:117] "RemoveContainer" containerID="006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce" Jan 04 11:54:18 crc kubenswrapper[5003]: E0104 11:54:18.759472 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce\": container with ID starting with 006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce not found: ID does not exist" containerID="006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce" Jan 04 11:54:18 crc kubenswrapper[5003]: I0104 11:54:18.759531 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce"} err="failed to get container status \"006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce\": rpc error: code = NotFound desc = could not find container \"006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce\": container with ID starting with 006b48442dcfe367e3322d88efc6489f287d943e85c97e918c7641a5088ed7ce not found: ID does not exist" Jan 04 11:54:19 crc kubenswrapper[5003]: I0104 11:54:19.020372 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"edc8a447-cc41-4241-be6a-957fa4255108\" (UID: \"edc8a447-cc41-4241-be6a-957fa4255108\") " Jan 04 11:54:19 crc kubenswrapper[5003]: I0104 11:54:19.038115 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "edc8a447-cc41-4241-be6a-957fa4255108" (UID: "edc8a447-cc41-4241-be6a-957fa4255108"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 04 11:54:19 crc kubenswrapper[5003]: I0104 11:54:19.185164 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d2kjq"] Jan 04 11:54:19 crc kubenswrapper[5003]: I0104 11:54:19.190087 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-d2kjq"] Jan 04 11:54:20 crc kubenswrapper[5003]: I0104 11:54:20.816884 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edc8a447-cc41-4241-be6a-957fa4255108" path="/var/lib/kubelet/pods/edc8a447-cc41-4241-be6a-957fa4255108/volumes" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.047594 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"] Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.048395 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw" podUID="fe1cf823-a84c-4c16-ba91-1b5ccef99ed6" containerName="controller-manager" containerID="cri-o://2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032" gracePeriod=30 Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.465960 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.515094 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvbfl\" (UniqueName: \"kubernetes.io/projected/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-kube-api-access-cvbfl\") pod \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.515195 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-config\") pod \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.515285 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-proxy-ca-bundles\") pod \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.515307 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-client-ca\") pod \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.515392 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-serving-cert\") pod \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\" (UID: \"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6\") " Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.516517 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-config" (OuterVolumeSpecName: "config") pod "fe1cf823-a84c-4c16-ba91-1b5ccef99ed6" (UID: "fe1cf823-a84c-4c16-ba91-1b5ccef99ed6"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.516534 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "fe1cf823-a84c-4c16-ba91-1b5ccef99ed6" (UID: "fe1cf823-a84c-4c16-ba91-1b5ccef99ed6"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.516702 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-client-ca" (OuterVolumeSpecName: "client-ca") pod "fe1cf823-a84c-4c16-ba91-1b5ccef99ed6" (UID: "fe1cf823-a84c-4c16-ba91-1b5ccef99ed6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.520774 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-kube-api-access-cvbfl" (OuterVolumeSpecName: "kube-api-access-cvbfl") pod "fe1cf823-a84c-4c16-ba91-1b5ccef99ed6" (UID: "fe1cf823-a84c-4c16-ba91-1b5ccef99ed6"). InnerVolumeSpecName "kube-api-access-cvbfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.522520 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fe1cf823-a84c-4c16-ba91-1b5ccef99ed6" (UID: "fe1cf823-a84c-4c16-ba91-1b5ccef99ed6"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.617655 5003 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.617706 5003 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-client-ca\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.617719 5003 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.617734 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvbfl\" (UniqueName: \"kubernetes.io/projected/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-kube-api-access-cvbfl\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.617753 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.777676 5003 generic.go:334] "Generic (PLEG): container finished" podID="fe1cf823-a84c-4c16-ba91-1b5ccef99ed6" containerID="2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032" exitCode=0 Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.777756 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw" event={"ID":"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6","Type":"ContainerDied","Data":"2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032"} Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.778186 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw" event={"ID":"fe1cf823-a84c-4c16-ba91-1b5ccef99ed6","Type":"ContainerDied","Data":"69118f59cfbcdbb426c89c69b1b3e9e97d25369b2df2e907f8711f786fdbcdfa"} Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.777825 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-598cb7b6db-zhfcw" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.778282 5003 scope.go:117] "RemoveContainer" containerID="2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.803307 5003 scope.go:117] "RemoveContainer" containerID="2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032" Jan 04 11:54:24 crc kubenswrapper[5003]: E0104 11:54:24.804166 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032\": container with ID starting with 2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032 not found: ID does not exist" containerID="2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.804223 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032"} err="failed to get container status \"2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032\": rpc error: code = NotFound desc = could not find container \"2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032\": container with ID starting with 2d7e6e9196c94e0fe46daa302077726149c33afc47c3adcbc2f45c8fabb02032 not found: ID does not exist" Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.827096 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"] Jan 04 11:54:24 crc kubenswrapper[5003]: I0104 11:54:24.834697 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-598cb7b6db-zhfcw"] Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.261162 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-86d9549ff5-7gk9d"] Jan 04 11:54:25 crc kubenswrapper[5003]: E0104 11:54:25.261634 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edc8a447-cc41-4241-be6a-957fa4255108" containerName="registry" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.261653 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="edc8a447-cc41-4241-be6a-957fa4255108" containerName="registry" Jan 04 11:54:25 crc kubenswrapper[5003]: E0104 11:54:25.261690 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe1cf823-a84c-4c16-ba91-1b5ccef99ed6" containerName="controller-manager" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.261700 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe1cf823-a84c-4c16-ba91-1b5ccef99ed6" containerName="controller-manager" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.262482 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe1cf823-a84c-4c16-ba91-1b5ccef99ed6" containerName="controller-manager" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.262555 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="edc8a447-cc41-4241-be6a-957fa4255108" containerName="registry" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.263732 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.267777 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.268564 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.269420 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.270724 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.271291 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.271358 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.274581 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86d9549ff5-7gk9d"] Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.278510 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.428989 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-serving-cert\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.429113 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-config\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.429263 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfqj7\" (UniqueName: \"kubernetes.io/projected/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-kube-api-access-bfqj7\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.429535 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-client-ca\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.429628 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-proxy-ca-bundles\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.545680 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-client-ca\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.545851 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-proxy-ca-bundles\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.545931 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-serving-cert\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.545984 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-config\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.546242 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfqj7\" (UniqueName: \"kubernetes.io/projected/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-kube-api-access-bfqj7\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.548673 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-client-ca\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.549515 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-config\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.552431 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-proxy-ca-bundles\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " 
pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.555628 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-serving-cert\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.571825 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfqj7\" (UniqueName: \"kubernetes.io/projected/4e7092c2-e39b-4e46-b416-6202d3b5c5f5-kube-api-access-bfqj7\") pod \"controller-manager-86d9549ff5-7gk9d\" (UID: \"4e7092c2-e39b-4e46-b416-6202d3b5c5f5\") " pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:25 crc kubenswrapper[5003]: I0104 11:54:25.595430 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:26 crc kubenswrapper[5003]: I0104 11:54:26.070804 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86d9549ff5-7gk9d"] Jan 04 11:54:26 crc kubenswrapper[5003]: I0104 11:54:26.793808 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" event={"ID":"4e7092c2-e39b-4e46-b416-6202d3b5c5f5","Type":"ContainerStarted","Data":"610f63d1638f898a6f2c724dc7fc248d433b580b7b1e65e24987a08a4e508b27"} Jan 04 11:54:26 crc kubenswrapper[5003]: I0104 11:54:26.795123 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" event={"ID":"4e7092c2-e39b-4e46-b416-6202d3b5c5f5","Type":"ContainerStarted","Data":"bacc2dcd8e9e51584241284aa1ae3223fbe8c6787a86904e0a8c93189837750c"} Jan 04 11:54:26 crc kubenswrapper[5003]: I0104 11:54:26.795270 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:26 crc kubenswrapper[5003]: I0104 11:54:26.799633 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" Jan 04 11:54:26 crc kubenswrapper[5003]: I0104 11:54:26.835334 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe1cf823-a84c-4c16-ba91-1b5ccef99ed6" path="/var/lib/kubelet/pods/fe1cf823-a84c-4c16-ba91-1b5ccef99ed6/volumes" Jan 04 11:54:26 crc kubenswrapper[5003]: I0104 11:54:26.839579 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-86d9549ff5-7gk9d" podStartSLOduration=2.839561756 podStartE2EDuration="2.839561756s" podCreationTimestamp="2026-01-04 11:54:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 11:54:26.819417417 +0000 UTC m=+382.292447288" watchObservedRunningTime="2026-01-04 11:54:26.839561756 +0000 UTC m=+382.312591597" Jan 04 11:54:39 crc kubenswrapper[5003]: I0104 11:54:39.419044 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" start-of-body= Jan 04 11:54:39 crc kubenswrapper[5003]: I0104 11:54:39.419716 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:55:09 crc kubenswrapper[5003]: I0104 11:55:09.418751 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 11:55:09 crc kubenswrapper[5003]: I0104 11:55:09.419269 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:55:09 crc kubenswrapper[5003]: I0104 11:55:09.419322 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:55:09 crc kubenswrapper[5003]: I0104 11:55:09.420095 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"38ae09deeffc8fbaeb90157004c63691cc52dd226000ec476514c7c6cd21089c"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 11:55:09 crc kubenswrapper[5003]: I0104 11:55:09.420238 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://38ae09deeffc8fbaeb90157004c63691cc52dd226000ec476514c7c6cd21089c" gracePeriod=600 Jan 04 11:55:10 crc kubenswrapper[5003]: I0104 11:55:10.033852 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="38ae09deeffc8fbaeb90157004c63691cc52dd226000ec476514c7c6cd21089c" exitCode=0 Jan 04 11:55:10 crc kubenswrapper[5003]: I0104 11:55:10.034056 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"38ae09deeffc8fbaeb90157004c63691cc52dd226000ec476514c7c6cd21089c"} Jan 04 11:55:10 crc kubenswrapper[5003]: I0104 11:55:10.034645 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"e6d83c13c4c3536bde3eb2f53b4fd3ead25d7f9dc8d9bc8c2b42265b335adda3"} Jan 04 11:55:10 crc kubenswrapper[5003]: I0104 11:55:10.034807 5003 scope.go:117] "RemoveContainer" containerID="0a798fcf8f9cd5527b591d753d73fc4914cbff107ec51b383503af8b81b53039" Jan 04 11:57:09 crc kubenswrapper[5003]: I0104 11:57:09.419178 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 11:57:09 crc kubenswrapper[5003]: I0104 11:57:09.420095 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:57:39 crc kubenswrapper[5003]: I0104 11:57:39.418331 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 11:57:39 crc kubenswrapper[5003]: I0104 11:57:39.420270 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:58:09 crc kubenswrapper[5003]: I0104 11:58:09.418964 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 11:58:09 crc kubenswrapper[5003]: I0104 11:58:09.419832 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 11:58:09 crc kubenswrapper[5003]: I0104 11:58:09.419908 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 11:58:09 crc kubenswrapper[5003]: I0104 11:58:09.420939 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e6d83c13c4c3536bde3eb2f53b4fd3ead25d7f9dc8d9bc8c2b42265b335adda3"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 11:58:09 crc kubenswrapper[5003]: I0104 11:58:09.421081 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://e6d83c13c4c3536bde3eb2f53b4fd3ead25d7f9dc8d9bc8c2b42265b335adda3" gracePeriod=600 Jan 04 11:58:10 crc kubenswrapper[5003]: I0104 11:58:10.351563 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="e6d83c13c4c3536bde3eb2f53b4fd3ead25d7f9dc8d9bc8c2b42265b335adda3" exitCode=0 Jan 04 11:58:10 crc kubenswrapper[5003]: I0104 11:58:10.351675 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" 
event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"e6d83c13c4c3536bde3eb2f53b4fd3ead25d7f9dc8d9bc8c2b42265b335adda3"} Jan 04 11:58:10 crc kubenswrapper[5003]: I0104 11:58:10.352055 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"544543b77ddff68504c56117c730883967d4ef6eb8006a6d7bde181f583bbabc"} Jan 04 11:58:10 crc kubenswrapper[5003]: I0104 11:58:10.352090 5003 scope.go:117] "RemoveContainer" containerID="38ae09deeffc8fbaeb90157004c63691cc52dd226000ec476514c7c6cd21089c" Jan 04 11:59:56 crc kubenswrapper[5003]: I0104 11:59:56.917331 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2lwxt"] Jan 04 11:59:56 crc kubenswrapper[5003]: I0104 11:59:56.918722 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovn-controller" containerID="cri-o://ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509" gracePeriod=30 Jan 04 11:59:56 crc kubenswrapper[5003]: I0104 11:59:56.918815 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="nbdb" containerID="cri-o://65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0" gracePeriod=30 Jan 04 11:59:56 crc kubenswrapper[5003]: I0104 11:59:56.918817 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0" gracePeriod=30 Jan 04 11:59:56 crc kubenswrapper[5003]: I0104 11:59:56.918924 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="kube-rbac-proxy-node" containerID="cri-o://ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58" gracePeriod=30 Jan 04 11:59:56 crc kubenswrapper[5003]: I0104 11:59:56.918990 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovn-acl-logging" containerID="cri-o://b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910" gracePeriod=30 Jan 04 11:59:56 crc kubenswrapper[5003]: I0104 11:59:56.919068 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="northd" containerID="cri-o://31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac" gracePeriod=30 Jan 04 11:59:56 crc kubenswrapper[5003]: I0104 11:59:56.919300 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="sbdb" containerID="cri-o://f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039" gracePeriod=30 Jan 04 11:59:56 crc kubenswrapper[5003]: I0104 11:59:56.985477 5003 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" containerID="cri-o://76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623" gracePeriod=30 Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.072926 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-np5qh_6e5d41d8-142e-4ca3-a20a-f6d338aaddf2/kube-multus/2.log" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.073424 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-np5qh_6e5d41d8-142e-4ca3-a20a-f6d338aaddf2/kube-multus/1.log" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.073457 5003 generic.go:334] "Generic (PLEG): container finished" podID="6e5d41d8-142e-4ca3-a20a-f6d338aaddf2" containerID="db992e1ff8ee746d747f10a8b6c8a30b540e1efe5581e111fa25a5cd5467774d" exitCode=2 Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.073509 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-np5qh" event={"ID":"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2","Type":"ContainerDied","Data":"db992e1ff8ee746d747f10a8b6c8a30b540e1efe5581e111fa25a5cd5467774d"} Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.073556 5003 scope.go:117] "RemoveContainer" containerID="0cbf399dee48d6e9caa2594fb848b76a2dfc1afea412cbbf8a72f715f6c8e4e7" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.074154 5003 scope.go:117] "RemoveContainer" containerID="db992e1ff8ee746d747f10a8b6c8a30b540e1efe5581e111fa25a5cd5467774d" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.074479 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-np5qh_openshift-multus(6e5d41d8-142e-4ca3-a20a-f6d338aaddf2)\"" pod="openshift-multus/multus-np5qh" podUID="6e5d41d8-142e-4ca3-a20a-f6d338aaddf2" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.086065 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/3.log" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.090237 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovn-acl-logging/0.log" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.091614 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovn-controller/0.log" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.094855 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0" exitCode=0 Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.094888 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58" exitCode=0 Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.094897 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910" exitCode=143 Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.094909 5003 generic.go:334] "Generic (PLEG): 
container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509" exitCode=143 Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.094934 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"} Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.094982 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"} Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.094996 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"} Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.095025 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"} Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.236304 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/3.log" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.239735 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovn-acl-logging/0.log" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.240265 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovn-controller/0.log" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.240775 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.305771 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-w22h9"] Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306067 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306082 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306094 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306100 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306109 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="kubecfg-setup" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306115 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="kubecfg-setup" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306123 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="kube-rbac-proxy-node" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306129 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="kube-rbac-proxy-node" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306137 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306143 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306153 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="northd" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306159 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="northd" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306166 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovn-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306171 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovn-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306181 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306187 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306196 5003 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="sbdb" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306202 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="sbdb" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306209 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306214 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306223 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovn-acl-logging" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306229 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovn-acl-logging" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306237 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="kube-rbac-proxy-ovn-metrics" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306243 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="kube-rbac-proxy-ovn-metrics" Jan 04 11:59:57 crc kubenswrapper[5003]: E0104 11:59:57.306256 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="nbdb" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306262 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="nbdb" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306357 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovn-acl-logging" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306367 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="nbdb" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306374 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306381 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="kube-rbac-proxy-node" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306387 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306393 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="northd" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306400 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306409 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306416 5003 
memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="sbdb" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306422 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovn-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306428 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="kube-rbac-proxy-ovn-metrics" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.306607 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerName="ovnkube-controller" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.309178 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.348943 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovn-node-metrics-cert\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349061 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-script-lib\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349117 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-etc-openvswitch\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349172 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-systemd\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349218 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349256 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-env-overrides\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349297 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-openvswitch\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349335 5003 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-slash\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349363 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-netd\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349396 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-netns\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349450 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-var-lib-openvswitch\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349494 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-config\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349547 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-ovn-kubernetes\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349610 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ss2hj\" (UniqueName: \"kubernetes.io/projected/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-kube-api-access-ss2hj\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349663 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-bin\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349802 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-ovn\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349837 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-node-log\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349871 5003 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-kubelet\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349933 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-systemd-units\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.349874 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-slash" (OuterVolumeSpecName: "host-slash") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.350061 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-log-socket\") pod \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\" (UID: \"e40671d3-61d7-4a50-b4ea-a67e4005fc3f\") " Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.350621 5003 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-slash\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.350723 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-log-socket" (OuterVolumeSpecName: "log-socket") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.350787 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.350819 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.350890 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.351554 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.351607 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.353318 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-node-log" (OuterVolumeSpecName: "node-log") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.353447 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.353493 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.354035 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.354399 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.354453 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). 
InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.354487 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.354528 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.354546 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.355803 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.358108 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.359089 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-kube-api-access-ss2hj" (OuterVolumeSpecName: "kube-api-access-ss2hj") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "kube-api-access-ss2hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.366036 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "e40671d3-61d7-4a50-b4ea-a67e4005fc3f" (UID: "e40671d3-61d7-4a50-b4ea-a67e4005fc3f"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.452645 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-kubelet\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.452726 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-cni-netd\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.452757 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-node-log\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.452784 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-log-socket\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.452824 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-run-netns\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.452855 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-run-systemd\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.452886 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-ovnkube-script-lib\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.452927 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-ovn-node-metrics-cert\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.452957 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-run-ovn-kubernetes\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.452998 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-systemd-units\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453067 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-slash\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453108 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-cni-bin\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453140 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wlpg\" (UniqueName: \"kubernetes.io/projected/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-kube-api-access-9wlpg\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453169 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-var-lib-openvswitch\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453208 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-env-overrides\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453242 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-run-ovn\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453278 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453486 5003 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-run-openvswitch\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453524 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-ovnkube-config\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453547 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-etc-openvswitch\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453620 5003 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453641 5003 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453661 5003 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453676 5003 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453688 5003 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453700 5003 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453713 5003 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453725 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453741 5003 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.453756 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ss2hj\" (UniqueName: \"kubernetes.io/projected/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-kube-api-access-ss2hj\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.455046 5003 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.455070 5003 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.455086 5003 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-node-log\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.455105 5003 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.455120 5003 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.455137 5003 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-log-socket\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.455153 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.455170 5003 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.455187 5003 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e40671d3-61d7-4a50-b4ea-a67e4005fc3f-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.560538 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-kubelet\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.560644 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-cni-netd\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.560699 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-node-log\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.560748 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-log-socket\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.560825 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-run-netns\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.560859 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-run-systemd\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.560899 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-ovnkube-script-lib\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.560943 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-ovn-node-metrics-cert\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.560981 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-run-ovn-kubernetes\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561040 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-systemd-units\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561116 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-slash\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 
11:59:57.561161 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-cni-bin\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561189 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wlpg\" (UniqueName: \"kubernetes.io/projected/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-kube-api-access-9wlpg\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561226 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-var-lib-openvswitch\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561267 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-run-ovn\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561306 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-env-overrides\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561349 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561410 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-run-openvswitch\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561438 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-ovnkube-config\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561474 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-etc-openvswitch\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561619 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-etc-openvswitch\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561691 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-kubelet\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561735 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-cni-netd\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561780 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-node-log\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561817 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-log-socket\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561860 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-run-netns\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.561897 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-run-systemd\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.563320 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-ovnkube-script-lib\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.568266 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-var-lib-openvswitch\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.568359 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-slash\") pod 
\"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.568410 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-cni-bin\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.568675 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-systemd-units\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.568832 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.568927 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-host-run-ovn-kubernetes\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.569925 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-env-overrides\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.569990 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-run-ovn\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.570198 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-run-openvswitch\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.570969 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-ovnkube-config\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.575132 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-ovn-node-metrics-cert\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.598745 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wlpg\" (UniqueName: \"kubernetes.io/projected/cd4dd53a-69b5-49aa-868a-6bbc3cc61a23-kube-api-access-9wlpg\") pod \"ovnkube-node-w22h9\" (UID: \"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23\") " pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:57 crc kubenswrapper[5003]: I0104 11:59:57.625502 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.100497 5003 generic.go:334] "Generic (PLEG): container finished" podID="cd4dd53a-69b5-49aa-868a-6bbc3cc61a23" containerID="131d724dfbe9b66c961b56be9aac22b302520148ca5b95f2f4c224cda7754872" exitCode=0 Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.100597 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" event={"ID":"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23","Type":"ContainerDied","Data":"131d724dfbe9b66c961b56be9aac22b302520148ca5b95f2f4c224cda7754872"} Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.100958 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" event={"ID":"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23","Type":"ContainerStarted","Data":"53b29a6c4613d1ac93675c8a94ea95e7a5130e703b184eae8f5598235b5d8ad7"} Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.103770 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-np5qh_6e5d41d8-142e-4ca3-a20a-f6d338aaddf2/kube-multus/2.log" Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.106328 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovnkube-controller/3.log" Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.108536 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovn-acl-logging/0.log" Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.108948 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2lwxt_e40671d3-61d7-4a50-b4ea-a67e4005fc3f/ovn-controller/0.log" Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.109445 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623" exitCode=0 Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.109465 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039" exitCode=0 Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.109474 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0" exitCode=0 Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.109483 5003 generic.go:334] "Generic (PLEG): container finished" podID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" containerID="31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac" exitCode=0 Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.109501 5003 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623"} Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.109516 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"} Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.109526 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"} Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.109535 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"} Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.109544 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt" event={"ID":"e40671d3-61d7-4a50-b4ea-a67e4005fc3f","Type":"ContainerDied","Data":"79ff0fefa09b629abc8ff4c48668385df9c13ea993bf4c6098145b3ee2df541f"} Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.109559 5003 scope.go:117] "RemoveContainer" containerID="76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623" Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.109677 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2lwxt"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.140240 5003 scope.go:117] "RemoveContainer" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.156972 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2lwxt"]
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.161318 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2lwxt"]
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.171787 5003 scope.go:117] "RemoveContainer" containerID="f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.207799 5003 scope.go:117] "RemoveContainer" containerID="65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.246725 5003 scope.go:117] "RemoveContainer" containerID="31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.263101 5003 scope.go:117] "RemoveContainer" containerID="0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.291060 5003 scope.go:117] "RemoveContainer" containerID="ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.319399 5003 scope.go:117] "RemoveContainer" containerID="b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.362119 5003 scope.go:117] "RemoveContainer" containerID="ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.386929 5003 scope.go:117] "RemoveContainer" containerID="0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.414167 5003 scope.go:117] "RemoveContainer" containerID="76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623"
Jan 04 11:59:58 crc kubenswrapper[5003]: E0104 11:59:58.414853 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623\": container with ID starting with 76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623 not found: ID does not exist" containerID="76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.414903 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623"} err="failed to get container status \"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623\": rpc error: code = NotFound desc = could not find container \"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623\": container with ID starting with 76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.414942 5003 scope.go:117] "RemoveContainer" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"
Jan 04 11:59:58 crc kubenswrapper[5003]: E0104 11:59:58.415629 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\": container with ID starting with f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010 not found: ID does not exist" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.415655 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"} err="failed to get container status \"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\": rpc error: code = NotFound desc = could not find container \"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\": container with ID starting with f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.415674 5003 scope.go:117] "RemoveContainer" containerID="f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"
Jan 04 11:59:58 crc kubenswrapper[5003]: E0104 11:59:58.416093 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\": container with ID starting with f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039 not found: ID does not exist" containerID="f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.416123 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"} err="failed to get container status \"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\": rpc error: code = NotFound desc = could not find container \"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\": container with ID starting with f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.416161 5003 scope.go:117] "RemoveContainer" containerID="65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"
Jan 04 11:59:58 crc kubenswrapper[5003]: E0104 11:59:58.416510 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\": container with ID starting with 65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0 not found: ID does not exist" containerID="65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.416540 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"} err="failed to get container status \"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\": rpc error: code = NotFound desc = could not find container \"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\": container with ID starting with 65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.416560 5003 scope.go:117] "RemoveContainer" containerID="31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"
Jan 04 11:59:58 crc kubenswrapper[5003]: E0104 11:59:58.416983 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\": container with ID starting with 31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac not found: ID does not exist" containerID="31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.417008 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"} err="failed to get container status \"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\": rpc error: code = NotFound desc = could not find container \"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\": container with ID starting with 31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.417040 5003 scope.go:117] "RemoveContainer" containerID="0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"
Jan 04 11:59:58 crc kubenswrapper[5003]: E0104 11:59:58.417435 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\": container with ID starting with 0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0 not found: ID does not exist" containerID="0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.417518 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"} err="failed to get container status \"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\": rpc error: code = NotFound desc = could not find container \"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\": container with ID starting with 0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.417580 5003 scope.go:117] "RemoveContainer" containerID="ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"
Jan 04 11:59:58 crc kubenswrapper[5003]: E0104 11:59:58.418031 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\": container with ID starting with ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58 not found: ID does not exist" containerID="ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.418056 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"} err="failed to get container status \"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\": rpc error: code = NotFound desc = could not find container \"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\": container with ID starting with ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.418073 5003 scope.go:117] "RemoveContainer" containerID="b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"
Jan 04 11:59:58 crc kubenswrapper[5003]: E0104 11:59:58.418366 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\": container with ID starting with b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910 not found: ID does not exist" containerID="b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.418441 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"} err="failed to get container status \"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\": rpc error: code = NotFound desc = could not find container \"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\": container with ID starting with b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.418473 5003 scope.go:117] "RemoveContainer" containerID="ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"
Jan 04 11:59:58 crc kubenswrapper[5003]: E0104 11:59:58.418716 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\": container with ID starting with ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509 not found: ID does not exist" containerID="ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.418745 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"} err="failed to get container status \"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\": rpc error: code = NotFound desc = could not find container \"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\": container with ID starting with ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.418764 5003 scope.go:117] "RemoveContainer" containerID="0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b"
Jan 04 11:59:58 crc kubenswrapper[5003]: E0104 11:59:58.419182 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\": container with ID starting with 0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b not found: ID does not exist" containerID="0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.419403 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b"} err="failed to get container status \"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\": rpc error: code = NotFound desc = could not find container \"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\": container with ID starting with 0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.419435 5003 scope.go:117] "RemoveContainer" containerID="76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.419687 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623"} err="failed to get container status \"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623\": rpc error: code = NotFound desc = could not find container \"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623\": container with ID starting with 76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.419716 5003 scope.go:117] "RemoveContainer" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.421677 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"} err="failed to get container status \"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\": rpc error: code = NotFound desc = could not find container \"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\": container with ID starting with f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.421713 5003 scope.go:117] "RemoveContainer" containerID="f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.422070 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"} err="failed to get container status \"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\": rpc error: code = NotFound desc = could not find container \"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\": container with ID starting with f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.422101 5003 scope.go:117] "RemoveContainer" containerID="65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.422477 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"} err="failed to get container status \"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\": rpc error: code = NotFound desc = could not find container \"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\": container with ID starting with 65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.422500 5003 scope.go:117] "RemoveContainer" containerID="31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.422775 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"} err="failed to get container status \"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\": rpc error: code = NotFound desc = could not find container \"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\": container with ID starting with 31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.422820 5003 scope.go:117] "RemoveContainer" containerID="0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.423092 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"} err="failed to get container status \"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\": rpc error: code = NotFound desc = could not find container \"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\": container with ID starting with 0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.423123 5003 scope.go:117] "RemoveContainer" containerID="ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.423454 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"} err="failed to get container status \"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\": rpc error: code = NotFound desc = could not find container \"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\": container with ID starting with ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.423475 5003 scope.go:117] "RemoveContainer" containerID="b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.423720 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"} err="failed to get container status \"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\": rpc error: code = NotFound desc = could not find container \"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\": container with ID starting with b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.423738 5003 scope.go:117] "RemoveContainer" containerID="ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.423938 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"} err="failed to get container status \"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\": rpc error: code = NotFound desc = could not find container \"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\": container with ID starting with ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.423955 5003 scope.go:117] "RemoveContainer" containerID="0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.424180 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b"} err="failed to get container status \"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\": rpc error: code = NotFound desc = could not find container \"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\": container with ID starting with 0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.424206 5003 scope.go:117] "RemoveContainer" containerID="76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.424575 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623"} err="failed to get container status \"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623\": rpc error: code = NotFound desc = could not find container \"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623\": container with ID starting with 76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.424602 5003 scope.go:117] "RemoveContainer" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.424835 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"} err="failed to get container status \"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\": rpc error: code = NotFound desc = could not find container \"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\": container with ID starting with f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.424863 5003 scope.go:117] "RemoveContainer" containerID="f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.425102 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"} err="failed to get container status \"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\": rpc error: code = NotFound desc = could not find container \"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\": container with ID starting with f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.425130 5003 scope.go:117] "RemoveContainer" containerID="65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.425309 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"} err="failed to get container status \"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\": rpc error: code = NotFound desc = could not find container \"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\": container with ID starting with 65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.425334 5003 scope.go:117] "RemoveContainer" containerID="31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.425600 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"} err="failed to get container status \"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\": rpc error: code = NotFound desc = could not find container \"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\": container with ID starting with 31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.425634 5003 scope.go:117] "RemoveContainer" containerID="0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.425937 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"} err="failed to get container status \"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\": rpc error: code = NotFound desc = could not find container \"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\": container with ID starting with 0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.425967 5003 scope.go:117] "RemoveContainer" containerID="ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.426206 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"} err="failed to get container status \"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\": rpc error: code = NotFound desc = could not find container \"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\": container with ID starting with ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.426235 5003 scope.go:117] "RemoveContainer" containerID="b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.426539 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"} err="failed to get container status \"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\": rpc error: code = NotFound desc = could not find container \"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\": container with ID starting with b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.426566 5003 scope.go:117] "RemoveContainer" containerID="ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.426802 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"} err="failed to get container status \"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\": rpc error: code = NotFound desc = could not find container \"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\": container with ID starting with ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.426838 5003 scope.go:117] "RemoveContainer" containerID="0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.427164 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b"} err="failed to get container status \"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\": rpc error: code = NotFound desc = could not find container \"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\": container with ID starting with 0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.427192 5003 scope.go:117] "RemoveContainer" containerID="76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.427565 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623"} err="failed to get container status \"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623\": rpc error: code = NotFound desc = could not find container \"76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623\": container with ID starting with 76f6d35e6148e1ed7e1bfc323d651dc6017b638bb6af5c8da42247285e08b623 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.427590 5003 scope.go:117] "RemoveContainer" containerID="f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.427829 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010"} err="failed to get container status \"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\": rpc error: code = NotFound desc = could not find container \"f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010\": container with ID starting with f6f6e150f09f0b817d6af18bb0ac35f23f1c72217392be252e6821d945f02010 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.427857 5003 scope.go:117] "RemoveContainer" containerID="f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.428593 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039"} err="failed to get container status \"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\": rpc error: code = NotFound desc = could not find container \"f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039\": container with ID starting with f97b2a586ea0cfd1b70d71d57fad41163e8aad32d32ec66120d5a28538725039 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.428769 5003 scope.go:117] "RemoveContainer" containerID="65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.429060 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0"} err="failed to get container status \"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\": rpc error: code = NotFound desc = could not find container \"65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0\": container with ID starting with 65ce6ef473aa0fde7b6bfbfe3a4f8ed4735f34ff48cbcd8eb29e450c13fb5ec0 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.429089 5003 scope.go:117] "RemoveContainer" containerID="31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.429374 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac"} err="failed to get container status \"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\": rpc error: code = NotFound desc = could not find container \"31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac\": container with ID starting with 31f43868b48adfa8143004cc460e76ad08a2ac048f02b7c610694d966ede7dac not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.429399 5003 scope.go:117] "RemoveContainer" containerID="0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.429740 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0"} err="failed to get container status \"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\": rpc error: code = NotFound desc = could not find container \"0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0\": container with ID starting with 0cc09ed3e9c68f92553ce7305399d8f257c184c33740723776644dec901db6d0 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.429804 5003 scope.go:117] "RemoveContainer" containerID="ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.430163 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58"} err="failed to get container status \"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\": rpc error: code = NotFound desc = could not find container \"ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58\": container with ID starting with ead39a4e0015869e09f761ed517af2529d25749a23a950f287ca0a788ae69b58 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.430194 5003 scope.go:117] "RemoveContainer" containerID="b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.430536 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910"} err="failed to get container status \"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\": rpc error: code = NotFound desc = could not find container \"b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910\": container with ID starting with b4134d4392a4f170b6ce573facfb310425dad7568ee9de98634075cf62293910 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.430562 5003 scope.go:117] "RemoveContainer" containerID="ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.430978 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509"} err="failed to get container status \"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\": rpc error: code = NotFound desc = could not find container \"ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509\": container with ID starting with ecfd8e4c15fd486b9c192eb2769c929b0e7ee245983ed64ab0efe3b253afe509 not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.430998 5003 scope.go:117] "RemoveContainer" containerID="0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.431318 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b"} err="failed to get container status \"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\": rpc error: code = NotFound desc = could not find container \"0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b\": container with ID starting with 0810b48ddf9f4ffbf800a011ba75f8afe2a67cbdc1eea7e34962a6132dd0c74b not found: ID does not exist"
Jan 04 11:59:58 crc kubenswrapper[5003]: I0104 11:59:58.823355 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e40671d3-61d7-4a50-b4ea-a67e4005fc3f" path="/var/lib/kubelet/pods/e40671d3-61d7-4a50-b4ea-a67e4005fc3f/volumes"
Jan 04 11:59:59 crc kubenswrapper[5003]: I0104 11:59:59.123939 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" event={"ID":"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23","Type":"ContainerStarted","Data":"d382db64cce55d7fb717282ab60a9325a767508334c69d0ac0525172cd1bcaed"}
Jan 04 11:59:59 crc kubenswrapper[5003]: I0104 11:59:59.124108 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" event={"ID":"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23","Type":"ContainerStarted","Data":"96dd202039b26a7b9a23a981a1541352936f27334d09972ea00c86e6d44742a2"}
Jan 04 11:59:59 crc kubenswrapper[5003]: I0104 11:59:59.124139 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" event={"ID":"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23","Type":"ContainerStarted","Data":"f3563a78585ff0b45c5594c03c47b365647f3c61b7508aa06784c03b858260a2"}
Jan 04 11:59:59 crc kubenswrapper[5003]: I0104 11:59:59.124157 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" event={"ID":"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23","Type":"ContainerStarted","Data":"a4b36b017309b2f478b9202530631800d784aaa42a4bd42010730fc28bf7f3bb"}
Jan 04 11:59:59 crc kubenswrapper[5003]: I0104 11:59:59.124173 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" event={"ID":"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23","Type":"ContainerStarted","Data":"563c698170d2d766b6878a53df2e99e558c4a5c571faa16d325be2ac20c4cd59"}
Jan 04 11:59:59 crc kubenswrapper[5003]: I0104 11:59:59.124193 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" event={"ID":"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23","Type":"ContainerStarted","Data":"ede5d9ac3f5f2eea94a8c447ac6fb1a0b719157fa0cc669f2e92409cb94005bd"}
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.194760 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"]
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.196297 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.198361 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.200045 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.302380 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmm8b\" (UniqueName: \"kubernetes.io/projected/4eeaa07e-7d61-40e2-a855-de1d1e337838-kube-api-access-dmm8b\") pod \"collect-profiles-29458800-bs74d\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.302444 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4eeaa07e-7d61-40e2-a855-de1d1e337838-config-volume\") pod \"collect-profiles-29458800-bs74d\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.302774 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4eeaa07e-7d61-40e2-a855-de1d1e337838-secret-volume\") pod \"collect-profiles-29458800-bs74d\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.403913 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4eeaa07e-7d61-40e2-a855-de1d1e337838-secret-volume\") pod \"collect-profiles-29458800-bs74d\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.403989 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmm8b\" (UniqueName: \"kubernetes.io/projected/4eeaa07e-7d61-40e2-a855-de1d1e337838-kube-api-access-dmm8b\") pod \"collect-profiles-29458800-bs74d\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.404073 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4eeaa07e-7d61-40e2-a855-de1d1e337838-config-volume\") pod \"collect-profiles-29458800-bs74d\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.405189 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4eeaa07e-7d61-40e2-a855-de1d1e337838-config-volume\") pod \"collect-profiles-29458800-bs74d\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.413606 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4eeaa07e-7d61-40e2-a855-de1d1e337838-secret-volume\") pod \"collect-profiles-29458800-bs74d\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.424671 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmm8b\" (UniqueName: \"kubernetes.io/projected/4eeaa07e-7d61-40e2-a855-de1d1e337838-kube-api-access-dmm8b\") pod \"collect-profiles-29458800-bs74d\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: I0104 12:00:00.513947 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: E0104 12:00:00.555594 5003 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(977b47a117477b6db4f23d69c6893028aca67ab575ce187eb7182fc6edc16881): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 04 12:00:00 crc kubenswrapper[5003]: E0104 12:00:00.555740 5003 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(977b47a117477b6db4f23d69c6893028aca67ab575ce187eb7182fc6edc16881): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: E0104 12:00:00.555783 5003 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(977b47a117477b6db4f23d69c6893028aca67ab575ce187eb7182fc6edc16881): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:00 crc kubenswrapper[5003]: E0104 12:00:00.555894 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager(4eeaa07e-7d61-40e2-a855-de1d1e337838)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager(4eeaa07e-7d61-40e2-a855-de1d1e337838)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(977b47a117477b6db4f23d69c6893028aca67ab575ce187eb7182fc6edc16881): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d" podUID="4eeaa07e-7d61-40e2-a855-de1d1e337838"
Jan 04 12:00:02 crc kubenswrapper[5003]: I0104 12:00:02.171133 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" event={"ID":"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23","Type":"ContainerStarted","Data":"698f8d5fbd7cd06aefc0c6aea3388e71a4ebdf9df9cfe2c4388d4014487ab334"}
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.175894 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-sspn5"]
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.176829 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.179818 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.180141 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.180312 5003 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-mxrrk"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.183008 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.350877 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-crc-storage\") pod \"crc-storage-crc-sspn5\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") " pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.351539 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vllsx\" (UniqueName: \"kubernetes.io/projected/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-kube-api-access-vllsx\") pod \"crc-storage-crc-sspn5\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") " pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.351577 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-node-mnt\") pod \"crc-storage-crc-sspn5\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") " pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.452937 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-node-mnt\") pod \"crc-storage-crc-sspn5\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") " pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.453066 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-crc-storage\") pod \"crc-storage-crc-sspn5\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") " pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.453125 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vllsx\" (UniqueName: \"kubernetes.io/projected/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-kube-api-access-vllsx\") pod \"crc-storage-crc-sspn5\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") " pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.453655 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-node-mnt\") pod \"crc-storage-crc-sspn5\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") " pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.454144 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-crc-storage\") pod \"crc-storage-crc-sspn5\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") " pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.477414 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vllsx\" (UniqueName: \"kubernetes.io/projected/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-kube-api-access-vllsx\") pod \"crc-storage-crc-sspn5\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") " pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: I0104 12:00:03.498560 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: E0104 12:00:03.522659 5003 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(1b454d8bf59c5d234069390e792733cb9a8f63a07a2153af8c70a5d43c486cce): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 04 12:00:03 crc kubenswrapper[5003]: E0104 12:00:03.522770 5003 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(1b454d8bf59c5d234069390e792733cb9a8f63a07a2153af8c70a5d43c486cce): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: E0104 12:00:03.522808 5003 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(1b454d8bf59c5d234069390e792733cb9a8f63a07a2153af8c70a5d43c486cce): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:03 crc kubenswrapper[5003]: E0104 12:00:03.522885 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-sspn5_crc-storage(c023dad7-7cb5-49b0-9405-bfeac57ff9ee)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-sspn5_crc-storage(c023dad7-7cb5-49b0-9405-bfeac57ff9ee)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(1b454d8bf59c5d234069390e792733cb9a8f63a07a2153af8c70a5d43c486cce): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-sspn5" podUID="c023dad7-7cb5-49b0-9405-bfeac57ff9ee"
Jan 04 12:00:04 crc kubenswrapper[5003]: I0104 12:00:04.191931 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" event={"ID":"cd4dd53a-69b5-49aa-868a-6bbc3cc61a23","Type":"ContainerStarted","Data":"ad96cb18f11d911496c5589c749839e95ced9576d5bb9e7daa96eaf7c0ae15e5"}
Jan 04 12:00:04 crc kubenswrapper[5003]: I0104 12:00:04.192479 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9"
Jan 04 12:00:04 crc kubenswrapper[5003]: I0104 12:00:04.192495 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9"
Jan 04 12:00:04 crc kubenswrapper[5003]: I0104 12:00:04.222742 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9" podStartSLOduration=7.222720295 podStartE2EDuration="7.222720295s" podCreationTimestamp="2026-01-04 11:59:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:00:04.21954371 +0000 UTC m=+719.692573591" watchObservedRunningTime="2026-01-04 12:00:04.222720295 +0000 UTC m=+719.695750156"
Jan 04 12:00:04 crc kubenswrapper[5003]: I0104 12:00:04.228682 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9"
Jan 04 12:00:04 crc kubenswrapper[5003]: I0104 12:00:04.316979 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"]
Jan 04 12:00:04 crc kubenswrapper[5003]: I0104 12:00:04.317150 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:04 crc kubenswrapper[5003]: I0104 12:00:04.317674 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:04 crc kubenswrapper[5003]: I0104 12:00:04.329398 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-sspn5"]
Jan 04 12:00:04 crc kubenswrapper[5003]: I0104 12:00:04.329823 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:04 crc kubenswrapper[5003]: I0104 12:00:04.330382 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:04 crc kubenswrapper[5003]: E0104 12:00:04.356371 5003 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(9a2fac876e5c2c91b748b30dee289abca75c4a49bb632bd49e1b11edbed29751): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 04 12:00:04 crc kubenswrapper[5003]: E0104 12:00:04.356453 5003 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(9a2fac876e5c2c91b748b30dee289abca75c4a49bb632bd49e1b11edbed29751): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:04 crc kubenswrapper[5003]: E0104 12:00:04.356520 5003 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(9a2fac876e5c2c91b748b30dee289abca75c4a49bb632bd49e1b11edbed29751): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:04 crc kubenswrapper[5003]: E0104 12:00:04.356575 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager(4eeaa07e-7d61-40e2-a855-de1d1e337838)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager(4eeaa07e-7d61-40e2-a855-de1d1e337838)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(9a2fac876e5c2c91b748b30dee289abca75c4a49bb632bd49e1b11edbed29751): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d" podUID="4eeaa07e-7d61-40e2-a855-de1d1e337838"
Jan 04 12:00:04 crc kubenswrapper[5003]: E0104 12:00:04.373997 5003 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(88facd980e573543da75df1cb46c773b7ff4db7eaaf8d4b7c2c82e4922c16f1c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 04 12:00:04 crc kubenswrapper[5003]: E0104 12:00:04.374084 5003 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(88facd980e573543da75df1cb46c773b7ff4db7eaaf8d4b7c2c82e4922c16f1c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:04 crc kubenswrapper[5003]: E0104 12:00:04.374108 5003 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(88facd980e573543da75df1cb46c773b7ff4db7eaaf8d4b7c2c82e4922c16f1c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:04 crc kubenswrapper[5003]: E0104 12:00:04.374166 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-sspn5_crc-storage(c023dad7-7cb5-49b0-9405-bfeac57ff9ee)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-sspn5_crc-storage(c023dad7-7cb5-49b0-9405-bfeac57ff9ee)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(88facd980e573543da75df1cb46c773b7ff4db7eaaf8d4b7c2c82e4922c16f1c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-sspn5" podUID="c023dad7-7cb5-49b0-9405-bfeac57ff9ee"
Jan 04 12:00:05 crc kubenswrapper[5003]: I0104 12:00:05.199682 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9"
Jan 04 12:00:05 crc kubenswrapper[5003]: I0104 12:00:05.228666 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9"
Jan 04 12:00:07 crc kubenswrapper[5003]: I0104 12:00:07.807540 5003 scope.go:117] "RemoveContainer" containerID="db992e1ff8ee746d747f10a8b6c8a30b540e1efe5581e111fa25a5cd5467774d"
Jan 04 12:00:07 crc kubenswrapper[5003]: E0104 12:00:07.809282 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-np5qh_openshift-multus(6e5d41d8-142e-4ca3-a20a-f6d338aaddf2)\"" pod="openshift-multus/multus-np5qh" podUID="6e5d41d8-142e-4ca3-a20a-f6d338aaddf2"
Jan 04 12:00:09 crc kubenswrapper[5003]: I0104 12:00:09.419053 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:00:09 crc kubenswrapper[5003]: I0104 12:00:09.419188 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:00:16 crc kubenswrapper[5003]: I0104 12:00:16.806863 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:16 crc kubenswrapper[5003]: I0104 12:00:16.808906 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:16 crc kubenswrapper[5003]: E0104 12:00:16.856249 5003 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(960aae1aa350fab6048830a5a5e7805d8ba1c040f18e91ad41f9228872ebf4a9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 04 12:00:16 crc kubenswrapper[5003]: E0104 12:00:16.856331 5003 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(960aae1aa350fab6048830a5a5e7805d8ba1c040f18e91ad41f9228872ebf4a9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:16 crc kubenswrapper[5003]: E0104 12:00:16.856361 5003 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(960aae1aa350fab6048830a5a5e7805d8ba1c040f18e91ad41f9228872ebf4a9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:16 crc kubenswrapper[5003]: E0104 12:00:16.856423 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager(4eeaa07e-7d61-40e2-a855-de1d1e337838)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager(4eeaa07e-7d61-40e2-a855-de1d1e337838)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_collect-profiles-29458800-bs74d_openshift-operator-lifecycle-manager_4eeaa07e-7d61-40e2-a855-de1d1e337838_0(960aae1aa350fab6048830a5a5e7805d8ba1c040f18e91ad41f9228872ebf4a9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d" podUID="4eeaa07e-7d61-40e2-a855-de1d1e337838"
Jan 04 12:00:17 crc kubenswrapper[5003]: I0104 12:00:17.807833 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:17 crc kubenswrapper[5003]: I0104 12:00:17.809244 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:17 crc kubenswrapper[5003]: E0104 12:00:17.861339 5003 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(8f0de86eb6da86896af228fab03a04587f88109756b19125d8011c3e236f820f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Jan 04 12:00:17 crc kubenswrapper[5003]: E0104 12:00:17.861437 5003 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(8f0de86eb6da86896af228fab03a04587f88109756b19125d8011c3e236f820f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:17 crc kubenswrapper[5003]: E0104 12:00:17.861478 5003 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(8f0de86eb6da86896af228fab03a04587f88109756b19125d8011c3e236f820f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:17 crc kubenswrapper[5003]: E0104 12:00:17.861543 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-sspn5_crc-storage(c023dad7-7cb5-49b0-9405-bfeac57ff9ee)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-sspn5_crc-storage(c023dad7-7cb5-49b0-9405-bfeac57ff9ee)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-sspn5_crc-storage_c023dad7-7cb5-49b0-9405-bfeac57ff9ee_0(8f0de86eb6da86896af228fab03a04587f88109756b19125d8011c3e236f820f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-sspn5" podUID="c023dad7-7cb5-49b0-9405-bfeac57ff9ee"
Jan 04 12:00:20 crc kubenswrapper[5003]: I0104 12:00:20.807792 5003 scope.go:117] "RemoveContainer" containerID="db992e1ff8ee746d747f10a8b6c8a30b540e1efe5581e111fa25a5cd5467774d"
Jan 04 12:00:21 crc kubenswrapper[5003]: I0104 12:00:21.324908 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-np5qh_6e5d41d8-142e-4ca3-a20a-f6d338aaddf2/kube-multus/2.log"
Jan 04 12:00:21 crc kubenswrapper[5003]: I0104 12:00:21.325363 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-np5qh" event={"ID":"6e5d41d8-142e-4ca3-a20a-f6d338aaddf2","Type":"ContainerStarted","Data":"6479dafee9e0cb632440f167d0ea5596cbdeaef9c0965b6c8cf3f0436974b22f"}
Jan 04 12:00:27 crc kubenswrapper[5003]: I0104 12:00:27.661443 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w22h9"
Jan 04 12:00:29 crc kubenswrapper[5003]: I0104 12:00:29.805888 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:29 crc kubenswrapper[5003]: I0104 12:00:29.807064 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:30 crc kubenswrapper[5003]: I0104 12:00:30.095920 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-sspn5"]
Jan 04 12:00:30 crc kubenswrapper[5003]: I0104 12:00:30.107375 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 04 12:00:30 crc kubenswrapper[5003]: I0104 12:00:30.387538 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sspn5" event={"ID":"c023dad7-7cb5-49b0-9405-bfeac57ff9ee","Type":"ContainerStarted","Data":"a157960b816a49197dfe98f0568408f17538e5c3c3140a600e57b822b373052f"}
Jan 04 12:00:30 crc kubenswrapper[5003]: I0104 12:00:30.806701 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:30 crc kubenswrapper[5003]: I0104 12:00:30.807289 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:31 crc kubenswrapper[5003]: I0104 12:00:31.068342 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"]
Jan 04 12:00:31 crc kubenswrapper[5003]: W0104 12:00:31.074632 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4eeaa07e_7d61_40e2_a855_de1d1e337838.slice/crio-11ab8db53849078ca92a2769f6ed83acc8c89f6844bde031dc7d785716fe56dc WatchSource:0}: Error finding container 11ab8db53849078ca92a2769f6ed83acc8c89f6844bde031dc7d785716fe56dc: Status 404 returned error can't find the container with id 11ab8db53849078ca92a2769f6ed83acc8c89f6844bde031dc7d785716fe56dc
Jan 04 12:00:31 crc kubenswrapper[5003]: I0104 12:00:31.397349 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d" event={"ID":"4eeaa07e-7d61-40e2-a855-de1d1e337838","Type":"ContainerStarted","Data":"1540e3f3ff17ed81708c703ab7eafa75d81aaf990f596cb4dc9d43026651f818"}
Jan 04 12:00:31 crc kubenswrapper[5003]: I0104 12:00:31.397974 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d" event={"ID":"4eeaa07e-7d61-40e2-a855-de1d1e337838","Type":"ContainerStarted","Data":"11ab8db53849078ca92a2769f6ed83acc8c89f6844bde031dc7d785716fe56dc"}
Jan 04 12:00:31 crc kubenswrapper[5003]: I0104 12:00:31.424273 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d" podStartSLOduration=31.424198351 podStartE2EDuration="31.424198351s" podCreationTimestamp="2026-01-04 12:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:00:31.416617949 +0000 UTC m=+746.889647870" watchObservedRunningTime="2026-01-04 12:00:31.424198351 +0000 UTC m=+746.897228242"
Jan 04 12:00:32 crc kubenswrapper[5003]: I0104 12:00:32.405139 5003 generic.go:334] "Generic (PLEG): container finished" podID="4eeaa07e-7d61-40e2-a855-de1d1e337838" containerID="1540e3f3ff17ed81708c703ab7eafa75d81aaf990f596cb4dc9d43026651f818" exitCode=0
Jan 04 12:00:32 crc kubenswrapper[5003]: I0104 12:00:32.405216 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d" event={"ID":"4eeaa07e-7d61-40e2-a855-de1d1e337838","Type":"ContainerDied","Data":"1540e3f3ff17ed81708c703ab7eafa75d81aaf990f596cb4dc9d43026651f818"}
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.413239 5003 generic.go:334] "Generic (PLEG): container finished" podID="c023dad7-7cb5-49b0-9405-bfeac57ff9ee" containerID="cd654310bb0361ddff25b2085b912c3b7be2e424ecec5784f666dfb25191e4c1" exitCode=0
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.413358 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sspn5" event={"ID":"c023dad7-7cb5-49b0-9405-bfeac57ff9ee","Type":"ContainerDied","Data":"cd654310bb0361ddff25b2085b912c3b7be2e424ecec5784f666dfb25191e4c1"}
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.635819 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.750992 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4eeaa07e-7d61-40e2-a855-de1d1e337838-secret-volume\") pod \"4eeaa07e-7d61-40e2-a855-de1d1e337838\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") "
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.751095 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4eeaa07e-7d61-40e2-a855-de1d1e337838-config-volume\") pod \"4eeaa07e-7d61-40e2-a855-de1d1e337838\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") "
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.751115 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmm8b\" (UniqueName: \"kubernetes.io/projected/4eeaa07e-7d61-40e2-a855-de1d1e337838-kube-api-access-dmm8b\") pod \"4eeaa07e-7d61-40e2-a855-de1d1e337838\" (UID: \"4eeaa07e-7d61-40e2-a855-de1d1e337838\") "
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.752209 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4eeaa07e-7d61-40e2-a855-de1d1e337838-config-volume" (OuterVolumeSpecName: "config-volume") pod "4eeaa07e-7d61-40e2-a855-de1d1e337838" (UID: "4eeaa07e-7d61-40e2-a855-de1d1e337838"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.760223 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eeaa07e-7d61-40e2-a855-de1d1e337838-kube-api-access-dmm8b" (OuterVolumeSpecName: "kube-api-access-dmm8b") pod "4eeaa07e-7d61-40e2-a855-de1d1e337838" (UID: "4eeaa07e-7d61-40e2-a855-de1d1e337838"). InnerVolumeSpecName "kube-api-access-dmm8b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.761193 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eeaa07e-7d61-40e2-a855-de1d1e337838-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4eeaa07e-7d61-40e2-a855-de1d1e337838" (UID: "4eeaa07e-7d61-40e2-a855-de1d1e337838"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.853077 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmm8b\" (UniqueName: \"kubernetes.io/projected/4eeaa07e-7d61-40e2-a855-de1d1e337838-kube-api-access-dmm8b\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.853126 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4eeaa07e-7d61-40e2-a855-de1d1e337838-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:33 crc kubenswrapper[5003]: I0104 12:00:33.853142 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4eeaa07e-7d61-40e2-a855-de1d1e337838-config-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:34 crc kubenswrapper[5003]: I0104 12:00:34.424168 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"
Jan 04 12:00:34 crc kubenswrapper[5003]: I0104 12:00:34.425205 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d" event={"ID":"4eeaa07e-7d61-40e2-a855-de1d1e337838","Type":"ContainerDied","Data":"11ab8db53849078ca92a2769f6ed83acc8c89f6844bde031dc7d785716fe56dc"}
Jan 04 12:00:34 crc kubenswrapper[5003]: I0104 12:00:34.425263 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11ab8db53849078ca92a2769f6ed83acc8c89f6844bde031dc7d785716fe56dc"
Jan 04 12:00:34 crc kubenswrapper[5003]: I0104 12:00:34.812324 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:34 crc kubenswrapper[5003]: I0104 12:00:34.970239 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-node-mnt\") pod \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") "
Jan 04 12:00:34 crc kubenswrapper[5003]: I0104 12:00:34.970344 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "c023dad7-7cb5-49b0-9405-bfeac57ff9ee" (UID: "c023dad7-7cb5-49b0-9405-bfeac57ff9ee"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 12:00:34 crc kubenswrapper[5003]: I0104 12:00:34.970541 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-crc-storage\") pod \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") "
Jan 04 12:00:34 crc kubenswrapper[5003]: I0104 12:00:34.970622 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vllsx\" (UniqueName: \"kubernetes.io/projected/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-kube-api-access-vllsx\") pod \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\" (UID: \"c023dad7-7cb5-49b0-9405-bfeac57ff9ee\") "
Jan 04 12:00:34 crc kubenswrapper[5003]: I0104 12:00:34.970999 5003 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-node-mnt\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:34 crc kubenswrapper[5003]: I0104 12:00:34.974891 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-kube-api-access-vllsx" (OuterVolumeSpecName: "kube-api-access-vllsx") pod "c023dad7-7cb5-49b0-9405-bfeac57ff9ee" (UID: "c023dad7-7cb5-49b0-9405-bfeac57ff9ee"). InnerVolumeSpecName "kube-api-access-vllsx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:00:34 crc kubenswrapper[5003]: I0104 12:00:34.996991 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "c023dad7-7cb5-49b0-9405-bfeac57ff9ee" (UID: "c023dad7-7cb5-49b0-9405-bfeac57ff9ee"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:00:35 crc kubenswrapper[5003]: I0104 12:00:35.073286 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vllsx\" (UniqueName: \"kubernetes.io/projected/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-kube-api-access-vllsx\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:35 crc kubenswrapper[5003]: I0104 12:00:35.073331 5003 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c023dad7-7cb5-49b0-9405-bfeac57ff9ee-crc-storage\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:35 crc kubenswrapper[5003]: I0104 12:00:35.434286 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sspn5" event={"ID":"c023dad7-7cb5-49b0-9405-bfeac57ff9ee","Type":"ContainerDied","Data":"a157960b816a49197dfe98f0568408f17538e5c3c3140a600e57b822b373052f"}
Jan 04 12:00:35 crc kubenswrapper[5003]: I0104 12:00:35.434333 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a157960b816a49197dfe98f0568408f17538e5c3c3140a600e57b822b373052f"
Jan 04 12:00:35 crc kubenswrapper[5003]: I0104 12:00:35.434453 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sspn5"
Jan 04 12:00:36 crc kubenswrapper[5003]: I0104 12:00:36.255035 5003 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 04 12:00:39 crc kubenswrapper[5003]: I0104 12:00:39.418767 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:00:39 crc kubenswrapper[5003]: I0104 12:00:39.420355 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:00:43 crc kubenswrapper[5003]: I0104 12:00:43.714929 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"]
Jan 04 12:00:43 crc kubenswrapper[5003]: E0104 12:00:43.715476 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c023dad7-7cb5-49b0-9405-bfeac57ff9ee" containerName="storage"
Jan 04 12:00:43 crc kubenswrapper[5003]: I0104 12:00:43.715490 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c023dad7-7cb5-49b0-9405-bfeac57ff9ee" containerName="storage"
Jan 04 12:00:43 crc kubenswrapper[5003]: E0104 12:00:43.715500 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eeaa07e-7d61-40e2-a855-de1d1e337838" containerName="collect-profiles"
Jan 04 12:00:43 crc kubenswrapper[5003]: I0104 12:00:43.715507 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eeaa07e-7d61-40e2-a855-de1d1e337838" containerName="collect-profiles"
Jan 04 12:00:43 crc kubenswrapper[5003]: I0104 12:00:43.715597 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c023dad7-7cb5-49b0-9405-bfeac57ff9ee" containerName="storage"
Jan 04 12:00:43 crc kubenswrapper[5003]: I0104 12:00:43.715608 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eeaa07e-7d61-40e2-a855-de1d1e337838" containerName="collect-profiles"
Jan 04 12:00:43 crc kubenswrapper[5003]: I0104 12:00:43.716272 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:43 crc kubenswrapper[5003]: I0104 12:00:43.719266 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Jan 04 12:00:43 crc kubenswrapper[5003]: I0104 12:00:43.728350 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"]
Jan 04 12:00:43 crc kubenswrapper[5003]: I0104 12:00:43.898812 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:43 crc kubenswrapper[5003]: I0104 12:00:43.898928 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:43 crc kubenswrapper[5003]: I0104 12:00:43.899185 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59tsh\" (UniqueName: \"kubernetes.io/projected/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-kube-api-access-59tsh\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:44 crc kubenswrapper[5003]: I0104 12:00:44.001555 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:44 crc kubenswrapper[5003]: I0104 12:00:44.001729 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59tsh\" (UniqueName: \"kubernetes.io/projected/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-kube-api-access-59tsh\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:44 crc kubenswrapper[5003]: I0104 12:00:44.001861 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:44 crc kubenswrapper[5003]: I0104 12:00:44.002644 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-bundle\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:44 crc kubenswrapper[5003]: I0104 12:00:44.002740 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-util\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:44 crc kubenswrapper[5003]: I0104 12:00:44.038895 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59tsh\" (UniqueName: \"kubernetes.io/projected/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-kube-api-access-59tsh\") pod \"98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") " pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:44 crc kubenswrapper[5003]: I0104 12:00:44.072227 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:44 crc kubenswrapper[5003]: I0104 12:00:44.351554 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"]
Jan 04 12:00:44 crc kubenswrapper[5003]: I0104 12:00:44.495332 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj" event={"ID":"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66","Type":"ContainerStarted","Data":"460071060b19a6cf1549537ff6c90221014c0ca6b3f5f5b75cd6a37994689033"}
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.228119 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mx9zj"]
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.231658 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.240904 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mx9zj"]
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.425139 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hngkc\" (UniqueName: \"kubernetes.io/projected/410b4d7a-8f19-4fc6-9f31-f67d2036af04-kube-api-access-hngkc\") pod \"redhat-operators-mx9zj\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") " pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.425188 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-utilities\") pod \"redhat-operators-mx9zj\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") " pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.425332 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-catalog-content\") pod \"redhat-operators-mx9zj\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") " pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.526001 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-catalog-content\") pod \"redhat-operators-mx9zj\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") " pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.526071 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hngkc\" (UniqueName: \"kubernetes.io/projected/410b4d7a-8f19-4fc6-9f31-f67d2036af04-kube-api-access-hngkc\") pod \"redhat-operators-mx9zj\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") " pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.526113 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-utilities\") pod \"redhat-operators-mx9zj\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") " pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.526713 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-utilities\") pod \"redhat-operators-mx9zj\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") " pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.526754 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-catalog-content\") pod \"redhat-operators-mx9zj\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") " pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.546129 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hngkc\" (UniqueName: \"kubernetes.io/projected/410b4d7a-8f19-4fc6-9f31-f67d2036af04-kube-api-access-hngkc\") pod \"redhat-operators-mx9zj\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") " pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.610543 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:45 crc kubenswrapper[5003]: I0104 12:00:45.839028 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mx9zj"]
Jan 04 12:00:45 crc kubenswrapper[5003]: W0104 12:00:45.844068 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod410b4d7a_8f19_4fc6_9f31_f67d2036af04.slice/crio-012fe0fcc7875a3f670e41a8060d7a63a0d6b25de6a30e46ed2872b8ee02fdbb WatchSource:0}: Error finding container 012fe0fcc7875a3f670e41a8060d7a63a0d6b25de6a30e46ed2872b8ee02fdbb: Status 404 returned error can't find the container with id 012fe0fcc7875a3f670e41a8060d7a63a0d6b25de6a30e46ed2872b8ee02fdbb
Jan 04 12:00:46 crc kubenswrapper[5003]: I0104 12:00:46.507429 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj" event={"ID":"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66","Type":"ContainerStarted","Data":"c1de5b410dafe725339f639d5812970ebeae99aafc68f08c0a3e9639ea51399e"}
Jan 04 12:00:46 crc kubenswrapper[5003]: I0104 12:00:46.510252 5003 generic.go:334] "Generic (PLEG): container finished" podID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" containerID="e92cd2874783249c88a9e95d03e1d445f4231d33d7daf222abb4aa0e0366b15c" exitCode=0
Jan 04 12:00:46 crc kubenswrapper[5003]: I0104 12:00:46.510316 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx9zj" event={"ID":"410b4d7a-8f19-4fc6-9f31-f67d2036af04","Type":"ContainerDied","Data":"e92cd2874783249c88a9e95d03e1d445f4231d33d7daf222abb4aa0e0366b15c"}
Jan 04 12:00:46 crc kubenswrapper[5003]: I0104 12:00:46.510343 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx9zj" event={"ID":"410b4d7a-8f19-4fc6-9f31-f67d2036af04","Type":"ContainerStarted","Data":"012fe0fcc7875a3f670e41a8060d7a63a0d6b25de6a30e46ed2872b8ee02fdbb"}
Jan 04 12:00:47 crc kubenswrapper[5003]: I0104 12:00:47.519685 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx9zj" event={"ID":"410b4d7a-8f19-4fc6-9f31-f67d2036af04","Type":"ContainerStarted","Data":"289f660221a0766eddb64be76c488b789204fa0296cdf508831f08bc016bc974"}
Jan 04 12:00:47 crc kubenswrapper[5003]: I0104 12:00:47.522724 5003 generic.go:334] "Generic (PLEG): container finished" podID="d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" containerID="c1de5b410dafe725339f639d5812970ebeae99aafc68f08c0a3e9639ea51399e" exitCode=0
Jan 04 12:00:47 crc kubenswrapper[5003]: I0104 12:00:47.522776 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj" event={"ID":"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66","Type":"ContainerDied","Data":"c1de5b410dafe725339f639d5812970ebeae99aafc68f08c0a3e9639ea51399e"}
Jan 04 12:00:48 crc kubenswrapper[5003]: I0104 12:00:48.537274 5003 generic.go:334] "Generic (PLEG): container finished" podID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" containerID="289f660221a0766eddb64be76c488b789204fa0296cdf508831f08bc016bc974" exitCode=0
Jan 04 12:00:48 crc kubenswrapper[5003]: I0104 12:00:48.537384 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx9zj" event={"ID":"410b4d7a-8f19-4fc6-9f31-f67d2036af04","Type":"ContainerDied","Data":"289f660221a0766eddb64be76c488b789204fa0296cdf508831f08bc016bc974"}
Jan 04 12:00:49 crc kubenswrapper[5003]: I0104 12:00:49.547886 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx9zj" event={"ID":"410b4d7a-8f19-4fc6-9f31-f67d2036af04","Type":"ContainerStarted","Data":"ad88535f19806b5a9f2910e22b7f1fa1f1dd77791250f92f0b808751828d8b48"}
Jan 04 12:00:49 crc kubenswrapper[5003]: I0104 12:00:49.577441 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mx9zj" podStartSLOduration=2.14389091 podStartE2EDuration="4.577420945s" podCreationTimestamp="2026-01-04 12:00:45 +0000 UTC" firstStartedPulling="2026-01-04 12:00:46.512557237 +0000 UTC m=+761.985587068" lastFinishedPulling="2026-01-04 12:00:48.946087262 +0000 UTC m=+764.419117103" observedRunningTime="2026-01-04 12:00:49.577360024 +0000 UTC m=+765.050389915" watchObservedRunningTime="2026-01-04 12:00:49.577420945 +0000 UTC m=+765.050450796"
Jan 04 12:00:50 crc kubenswrapper[5003]: I0104 12:00:50.554751 5003 generic.go:334] "Generic (PLEG): container finished" podID="d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" containerID="c0dbd2102ed5f4cd90e5d5bbd4d9d9a9143cf4787defbfd90767d1314d677168" exitCode=0
Jan 04 12:00:50 crc kubenswrapper[5003]: I0104 12:00:50.554837 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj" event={"ID":"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66","Type":"ContainerDied","Data":"c0dbd2102ed5f4cd90e5d5bbd4d9d9a9143cf4787defbfd90767d1314d677168"}
Jan 04 12:00:51 crc kubenswrapper[5003]: I0104 12:00:51.564783 5003 generic.go:334] "Generic (PLEG): container finished" podID="d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" containerID="0e78a56d425422f23f24aaf8d66117bd788537e2aaf511f1cdcf4cc3bdb82030" exitCode=0
Jan 04 12:00:51 crc kubenswrapper[5003]: I0104 12:00:51.564871 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj" event={"ID":"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66","Type":"ContainerDied","Data":"0e78a56d425422f23f24aaf8d66117bd788537e2aaf511f1cdcf4cc3bdb82030"}
Jan 04 12:00:52 crc kubenswrapper[5003]: I0104 12:00:52.860302 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.023789 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-util\") pod \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") "
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.023875 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59tsh\" (UniqueName: \"kubernetes.io/projected/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-kube-api-access-59tsh\") pod \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") "
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.023961 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-bundle\") pod \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\" (UID: \"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66\") "
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.025137 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-bundle" (OuterVolumeSpecName: "bundle") pod "d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" (UID: "d72d2173-7ced-4d5d-bd95-ee65cbf3ee66"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.030170 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-kube-api-access-59tsh" (OuterVolumeSpecName: "kube-api-access-59tsh") pod "d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" (UID: "d72d2173-7ced-4d5d-bd95-ee65cbf3ee66"). InnerVolumeSpecName "kube-api-access-59tsh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.034103 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-util" (OuterVolumeSpecName: "util") pod "d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" (UID: "d72d2173-7ced-4d5d-bd95-ee65cbf3ee66"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.125303 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.125348 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-util\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.125361 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59tsh\" (UniqueName: \"kubernetes.io/projected/d72d2173-7ced-4d5d-bd95-ee65cbf3ee66-kube-api-access-59tsh\") on node \"crc\" DevicePath \"\""
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.581311 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj" event={"ID":"d72d2173-7ced-4d5d-bd95-ee65cbf3ee66","Type":"ContainerDied","Data":"460071060b19a6cf1549537ff6c90221014c0ca6b3f5f5b75cd6a37994689033"}
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.581359 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="460071060b19a6cf1549537ff6c90221014c0ca6b3f5f5b75cd6a37994689033"
Jan 04 12:00:53 crc kubenswrapper[5003]: I0104 12:00:53.581365 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj"
Jan 04 12:00:55 crc kubenswrapper[5003]: I0104 12:00:55.611174 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:55 crc kubenswrapper[5003]: I0104 12:00:55.611576 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:55 crc kubenswrapper[5003]: I0104 12:00:55.679921 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:56 crc kubenswrapper[5003]: I0104 12:00:56.678718 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:00:56 crc kubenswrapper[5003]: I0104 12:00:56.746412 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mx9zj"]
Jan 04 12:00:58 crc kubenswrapper[5003]: I0104 12:00:58.615551 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mx9zj" podUID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" containerName="registry-server" containerID="cri-o://ad88535f19806b5a9f2910e22b7f1fa1f1dd77791250f92f0b808751828d8b48" gracePeriod=2
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.639424 5003 generic.go:334] "Generic (PLEG): container finished" podID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" containerID="ad88535f19806b5a9f2910e22b7f1fa1f1dd77791250f92f0b808751828d8b48" exitCode=0
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.639485 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx9zj" event={"ID":"410b4d7a-8f19-4fc6-9f31-f67d2036af04","Type":"ContainerDied","Data":"ad88535f19806b5a9f2910e22b7f1fa1f1dd77791250f92f0b808751828d8b48"}
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.760740 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-nngv8"]
Jan 04 12:01:01 crc kubenswrapper[5003]: E0104 12:01:01.761084 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" containerName="pull"
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.761101 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" containerName="pull"
Jan 04 12:01:01 crc kubenswrapper[5003]: E0104 12:01:01.761115 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" containerName="util"
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.761123 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" containerName="util"
Jan 04 12:01:01 crc kubenswrapper[5003]: E0104 12:01:01.761131 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" containerName="extract"
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.761139 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" containerName="extract"
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.761783 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d72d2173-7ced-4d5d-bd95-ee65cbf3ee66" containerName="extract"
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.762384 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-nngv8"
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.764968 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.765111 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.765189 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-qckd5"
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.778165 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-nngv8"]
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.851316 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtvxg\" (UniqueName: \"kubernetes.io/projected/034caf29-3d2b-442f-9524-bbb547b0b8bc-kube-api-access-dtvxg\") pod \"nmstate-operator-6769fb99d-nngv8\" (UID: \"034caf29-3d2b-442f-9524-bbb547b0b8bc\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-nngv8"
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.953083 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtvxg\" (UniqueName: \"kubernetes.io/projected/034caf29-3d2b-442f-9524-bbb547b0b8bc-kube-api-access-dtvxg\") pod \"nmstate-operator-6769fb99d-nngv8\" (UID: \"034caf29-3d2b-442f-9524-bbb547b0b8bc\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-nngv8"
Jan 04 12:01:01 crc kubenswrapper[5003]: I0104 12:01:01.982758 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtvxg\" (UniqueName: \"kubernetes.io/projected/034caf29-3d2b-442f-9524-bbb547b0b8bc-kube-api-access-dtvxg\") pod \"nmstate-operator-6769fb99d-nngv8\" (UID: \"034caf29-3d2b-442f-9524-bbb547b0b8bc\") " pod="openshift-nmstate/nmstate-operator-6769fb99d-nngv8"
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.079529 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-6769fb99d-nngv8"
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.307280 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-6769fb99d-nngv8"]
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.646275 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-nngv8" event={"ID":"034caf29-3d2b-442f-9524-bbb547b0b8bc","Type":"ContainerStarted","Data":"bc3005a7f4c8e9ba29b337d42b8dbfa487f878eed06b44461d56d9b255c9ef5e"}
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.689255 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.762941 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hngkc\" (UniqueName: \"kubernetes.io/projected/410b4d7a-8f19-4fc6-9f31-f67d2036af04-kube-api-access-hngkc\") pod \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") "
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.763146 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-utilities\") pod \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") "
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.763244 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-catalog-content\") pod \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\" (UID: \"410b4d7a-8f19-4fc6-9f31-f67d2036af04\") "
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.766723 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-utilities" (OuterVolumeSpecName: "utilities") pod "410b4d7a-8f19-4fc6-9f31-f67d2036af04" (UID: "410b4d7a-8f19-4fc6-9f31-f67d2036af04"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.771553 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/410b4d7a-8f19-4fc6-9f31-f67d2036af04-kube-api-access-hngkc" (OuterVolumeSpecName: "kube-api-access-hngkc") pod "410b4d7a-8f19-4fc6-9f31-f67d2036af04" (UID: "410b4d7a-8f19-4fc6-9f31-f67d2036af04"). InnerVolumeSpecName "kube-api-access-hngkc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.864588 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.864625 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hngkc\" (UniqueName: \"kubernetes.io/projected/410b4d7a-8f19-4fc6-9f31-f67d2036af04-kube-api-access-hngkc\") on node \"crc\" DevicePath \"\""
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.914395 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "410b4d7a-8f19-4fc6-9f31-f67d2036af04" (UID: "410b4d7a-8f19-4fc6-9f31-f67d2036af04"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:01:02 crc kubenswrapper[5003]: I0104 12:01:02.965282 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/410b4d7a-8f19-4fc6-9f31-f67d2036af04-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:01:03 crc kubenswrapper[5003]: I0104 12:01:03.656075 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mx9zj" event={"ID":"410b4d7a-8f19-4fc6-9f31-f67d2036af04","Type":"ContainerDied","Data":"012fe0fcc7875a3f670e41a8060d7a63a0d6b25de6a30e46ed2872b8ee02fdbb"}
Jan 04 12:01:03 crc kubenswrapper[5003]: I0104 12:01:03.656163 5003 scope.go:117] "RemoveContainer" containerID="ad88535f19806b5a9f2910e22b7f1fa1f1dd77791250f92f0b808751828d8b48"
Jan 04 12:01:03 crc kubenswrapper[5003]: I0104 12:01:03.656222 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mx9zj"
Jan 04 12:01:03 crc kubenswrapper[5003]: I0104 12:01:03.674303 5003 scope.go:117] "RemoveContainer" containerID="289f660221a0766eddb64be76c488b789204fa0296cdf508831f08bc016bc974"
Jan 04 12:01:03 crc kubenswrapper[5003]: I0104 12:01:03.696900 5003 scope.go:117] "RemoveContainer" containerID="e92cd2874783249c88a9e95d03e1d445f4231d33d7daf222abb4aa0e0366b15c"
Jan 04 12:01:03 crc kubenswrapper[5003]: I0104 12:01:03.703814 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mx9zj"]
Jan 04 12:01:03 crc kubenswrapper[5003]: I0104 12:01:03.708295 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mx9zj"]
Jan 04 12:01:04 crc kubenswrapper[5003]: I0104 12:01:04.816934 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" path="/var/lib/kubelet/pods/410b4d7a-8f19-4fc6-9f31-f67d2036af04/volumes"
Jan 04 12:01:05 crc kubenswrapper[5003]: I0104 12:01:05.672452 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-6769fb99d-nngv8" event={"ID":"034caf29-3d2b-442f-9524-bbb547b0b8bc","Type":"ContainerStarted","Data":"6aaf9ddb0776fb37ad9a92b73e9dc442607ca3bf8b54847e8a8e2682e3149c9e"}
Jan 04 12:01:05 crc kubenswrapper[5003]: I0104 12:01:05.688176 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-6769fb99d-nngv8" podStartSLOduration=2.459014376 podStartE2EDuration="4.688156867s" podCreationTimestamp="2026-01-04 12:01:01 +0000 UTC" firstStartedPulling="2026-01-04 12:01:02.31332526 +0000 UTC m=+777.786355101" lastFinishedPulling="2026-01-04 12:01:04.542467751 +0000 UTC m=+780.015497592" observedRunningTime="2026-01-04 12:01:05.687820958 +0000 UTC m=+781.160850809" watchObservedRunningTime="2026-01-04 12:01:05.688156867 +0000 UTC m=+781.161186718"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.635252 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6"]
Jan 04 12:01:06 crc kubenswrapper[5003]: E0104 12:01:06.636001 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" containerName="extract-content"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.636036 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" containerName="extract-content"
Jan 04 12:01:06 crc kubenswrapper[5003]: E0104 12:01:06.636054 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" containerName="extract-utilities"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.636063 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" containerName="extract-utilities"
Jan 04 12:01:06 crc kubenswrapper[5003]: E0104 12:01:06.636080 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" containerName="registry-server"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.636089 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" containerName="registry-server"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.636229 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="410b4d7a-8f19-4fc6-9f31-f67d2036af04" containerName="registry-server"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.636936 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.639393 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf"]
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.639927 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.642894 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-glplm"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.648347 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.662544 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6"]
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.672635 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-v2x7n"]
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.673571 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.675714 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf"]
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.717139 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/10341446-4c79-4087-8c82-80ffcb35f39a-nmstate-lock\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.717181 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv7l4\" (UniqueName: \"kubernetes.io/projected/10341446-4c79-4087-8c82-80ffcb35f39a-kube-api-access-dv7l4\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.717213 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgftp\" (UniqueName: \"kubernetes.io/projected/1cbd3f4f-d288-4588-a0c4-1e616c0e510a-kube-api-access-rgftp\") pod \"nmstate-metrics-7f7f7578db-fldh6\" (UID: \"1cbd3f4f-d288-4588-a0c4-1e616c0e510a\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.717239 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1e4f1d48-3190-4f87-8052-8c722ca87582-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-tc9qf\" (UID: \"1e4f1d48-3190-4f87-8052-8c722ca87582\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.717445 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/10341446-4c79-4087-8c82-80ffcb35f39a-dbus-socket\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.717535 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/10341446-4c79-4087-8c82-80ffcb35f39a-ovs-socket\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.717579 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9svzw\" (UniqueName: \"kubernetes.io/projected/1e4f1d48-3190-4f87-8052-8c722ca87582-kube-api-access-9svzw\") pod \"nmstate-webhook-f8fb84555-tc9qf\" (UID: \"1e4f1d48-3190-4f87-8052-8c722ca87582\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.797680 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq"]
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.798596 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.802759 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.802893 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.805698 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-mdnv5"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.817849 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq"]
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818094 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/86712d7d-38bd-4a51-842d-c168bf155f04-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-zxckq\" (UID: \"86712d7d-38bd-4a51-842d-c168bf155f04\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818138 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgxpp\" (UniqueName: \"kubernetes.io/projected/86712d7d-38bd-4a51-842d-c168bf155f04-kube-api-access-jgxpp\") pod \"nmstate-console-plugin-6ff7998486-zxckq\" (UID: \"86712d7d-38bd-4a51-842d-c168bf155f04\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818161 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/10341446-4c79-4087-8c82-80ffcb35f39a-nmstate-lock\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818180 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv7l4\" (UniqueName: \"kubernetes.io/projected/10341446-4c79-4087-8c82-80ffcb35f39a-kube-api-access-dv7l4\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818219 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgftp\" (UniqueName: \"kubernetes.io/projected/1cbd3f4f-d288-4588-a0c4-1e616c0e510a-kube-api-access-rgftp\") pod \"nmstate-metrics-7f7f7578db-fldh6\" (UID: \"1cbd3f4f-d288-4588-a0c4-1e616c0e510a\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818246 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1e4f1d48-3190-4f87-8052-8c722ca87582-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-tc9qf\" (UID: \"1e4f1d48-3190-4f87-8052-8c722ca87582\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818319 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/10341446-4c79-4087-8c82-80ffcb35f39a-nmstate-lock\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: E0104 12:01:06.818366 5003 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818374 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/10341446-4c79-4087-8c82-80ffcb35f39a-dbus-socket\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: E0104 12:01:06.818422 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1e4f1d48-3190-4f87-8052-8c722ca87582-tls-key-pair podName:1e4f1d48-3190-4f87-8052-8c722ca87582 nodeName:}" failed. No retries permitted until 2026-01-04 12:01:07.318402931 +0000 UTC m=+782.791432772 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/1e4f1d48-3190-4f87-8052-8c722ca87582-tls-key-pair") pod "nmstate-webhook-f8fb84555-tc9qf" (UID: "1e4f1d48-3190-4f87-8052-8c722ca87582") : secret "openshift-nmstate-webhook" not found
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818457 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/10341446-4c79-4087-8c82-80ffcb35f39a-ovs-socket\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818485 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9svzw\" (UniqueName: \"kubernetes.io/projected/1e4f1d48-3190-4f87-8052-8c722ca87582-kube-api-access-9svzw\") pod \"nmstate-webhook-f8fb84555-tc9qf\" (UID: \"1e4f1d48-3190-4f87-8052-8c722ca87582\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818520 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/86712d7d-38bd-4a51-842d-c168bf155f04-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-zxckq\" (UID: \"86712d7d-38bd-4a51-842d-c168bf155f04\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818557 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/10341446-4c79-4087-8c82-80ffcb35f39a-ovs-socket\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.818710 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/10341446-4c79-4087-8c82-80ffcb35f39a-dbus-socket\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.847448 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv7l4\" (UniqueName: \"kubernetes.io/projected/10341446-4c79-4087-8c82-80ffcb35f39a-kube-api-access-dv7l4\") pod \"nmstate-handler-v2x7n\" (UID: \"10341446-4c79-4087-8c82-80ffcb35f39a\") " pod="openshift-nmstate/nmstate-handler-v2x7n"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.847637 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9svzw\" (UniqueName: \"kubernetes.io/projected/1e4f1d48-3190-4f87-8052-8c722ca87582-kube-api-access-9svzw\") pod \"nmstate-webhook-f8fb84555-tc9qf\" (UID: \"1e4f1d48-3190-4f87-8052-8c722ca87582\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.855488 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgftp\" (UniqueName: \"kubernetes.io/projected/1cbd3f4f-d288-4588-a0c4-1e616c0e510a-kube-api-access-rgftp\") pod \"nmstate-metrics-7f7f7578db-fldh6\" (UID: \"1cbd3f4f-d288-4588-a0c4-1e616c0e510a\") " pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6"
Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.920041 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/86712d7d-38bd-4a51-842d-c168bf155f04-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-zxckq\" (UID: \"86712d7d-38bd-4a51-842d-c168bf155f04\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq"
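The nestedpendingoperations error above shows kubelet's volume retry policy in action: the tls-key-pair mount fails because the openshift-nmstate-webhook secret does not exist yet, the operation is then blocked for durationBeforeRetry (500ms here), and the retry at 12:01:07.329886 below succeeds once the secret appears. A toy Go sketch of that backoff shape; only the 500ms initial delay comes from the log, while the doubling factor and the roughly two-minute cap are assumptions for illustration:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Initial delay taken from the log ("durationBeforeRetry 500ms");
	// factor and cap are assumed, not read from kubelet's source.
	delay := 500 * time.Millisecond
	maxDelay := 2*time.Minute + 2*time.Second
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("attempt %d failed: no retries permitted for %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}
```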
"operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/86712d7d-38bd-4a51-842d-c168bf155f04-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-zxckq\" (UID: \"86712d7d-38bd-4a51-842d-c168bf155f04\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq" Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.920143 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/86712d7d-38bd-4a51-842d-c168bf155f04-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-zxckq\" (UID: \"86712d7d-38bd-4a51-842d-c168bf155f04\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq" Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.920168 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgxpp\" (UniqueName: \"kubernetes.io/projected/86712d7d-38bd-4a51-842d-c168bf155f04-kube-api-access-jgxpp\") pod \"nmstate-console-plugin-6ff7998486-zxckq\" (UID: \"86712d7d-38bd-4a51-842d-c168bf155f04\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq" Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.921390 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/86712d7d-38bd-4a51-842d-c168bf155f04-nginx-conf\") pod \"nmstate-console-plugin-6ff7998486-zxckq\" (UID: \"86712d7d-38bd-4a51-842d-c168bf155f04\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq" Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.927274 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/86712d7d-38bd-4a51-842d-c168bf155f04-plugin-serving-cert\") pod \"nmstate-console-plugin-6ff7998486-zxckq\" (UID: \"86712d7d-38bd-4a51-842d-c168bf155f04\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq" Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.944368 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgxpp\" (UniqueName: \"kubernetes.io/projected/86712d7d-38bd-4a51-842d-c168bf155f04-kube-api-access-jgxpp\") pod \"nmstate-console-plugin-6ff7998486-zxckq\" (UID: \"86712d7d-38bd-4a51-842d-c168bf155f04\") " pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq" Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.956353 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6" Jan 04 12:01:06 crc kubenswrapper[5003]: I0104 12:01:06.988510 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-v2x7n" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.009421 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-78df5467d-f5p65"] Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.010566 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.020994 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/19eacfaa-5358-4c9b-9718-1eca4aef583b-console-oauth-config\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.021163 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-service-ca\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.021327 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-trusted-ca-bundle\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.021399 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-oauth-serving-cert\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.021460 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-console-config\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.021524 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/19eacfaa-5358-4c9b-9718-1eca4aef583b-console-serving-cert\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.021595 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rh7g\" (UniqueName: \"kubernetes.io/projected/19eacfaa-5358-4c9b-9718-1eca4aef583b-kube-api-access-9rh7g\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.026792 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-78df5467d-f5p65"] Jan 04 12:01:07 crc kubenswrapper[5003]: W0104 12:01:07.037389 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10341446_4c79_4087_8c82_80ffcb35f39a.slice/crio-2e04d3d612d76fe91a3650bf685ab78042285b4480840e9fb0a06c6e039c61d8 WatchSource:0}: Error finding container 2e04d3d612d76fe91a3650bf685ab78042285b4480840e9fb0a06c6e039c61d8: Status 404 returned 
error can't find the container with id 2e04d3d612d76fe91a3650bf685ab78042285b4480840e9fb0a06c6e039c61d8 Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.112442 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.123426 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-oauth-serving-cert\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.123469 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-console-config\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.123505 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/19eacfaa-5358-4c9b-9718-1eca4aef583b-console-serving-cert\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.123531 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rh7g\" (UniqueName: \"kubernetes.io/projected/19eacfaa-5358-4c9b-9718-1eca4aef583b-kube-api-access-9rh7g\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.123579 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-service-ca\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.123596 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/19eacfaa-5358-4c9b-9718-1eca4aef583b-console-oauth-config\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.123664 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-trusted-ca-bundle\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.124774 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-console-config\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.124816 5003 operation_generator.go:637] "MountVolume.SetUp 
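Every kubenswrapper message in this log uses the klog header format: a severity letter (I/W/E), an MMDD date, a microsecond wall-clock time, the PID, and the source file:line before the bracketed message. A small, illustrative Go parser for that header, handy for slicing logs like this one (it is a convenience sketch, not part of Kubernetes):

```go
package main

import (
	"fmt"
	"regexp"
)

// Matches headers such as: W0104 12:01:07.037389 5003 manager.go:1169] ...
var header = regexp.MustCompile(`^([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+)\s+([\w._-]+:\d+)\] (.*)$`)

func main() {
	line := `W0104 12:01:07.037389 5003 manager.go:1169] Failed to process watch event`
	m := header.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match")
		return
	}
	fmt.Printf("severity=%s date=%s time=%s pid=%s src=%s msg=%q\n",
		m[1], m[2], m[3], m[4], m[5], m[6])
}
```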
succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-oauth-serving-cert\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.125231 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-trusted-ca-bundle\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.125757 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/19eacfaa-5358-4c9b-9718-1eca4aef583b-service-ca\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.132553 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/19eacfaa-5358-4c9b-9718-1eca4aef583b-console-oauth-config\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.133005 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/19eacfaa-5358-4c9b-9718-1eca4aef583b-console-serving-cert\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.143294 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rh7g\" (UniqueName: \"kubernetes.io/projected/19eacfaa-5358-4c9b-9718-1eca4aef583b-kube-api-access-9rh7g\") pod \"console-78df5467d-f5p65\" (UID: \"19eacfaa-5358-4c9b-9718-1eca4aef583b\") " pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: W0104 12:01:07.188860 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cbd3f4f_d288_4588_a0c4_1e616c0e510a.slice/crio-9b95c66329a8fb7309427bdbd6090c198ef96f5fad15c985d9a702771563dc9e WatchSource:0}: Error finding container 9b95c66329a8fb7309427bdbd6090c198ef96f5fad15c985d9a702771563dc9e: Status 404 returned error can't find the container with id 9b95c66329a8fb7309427bdbd6090c198ef96f5fad15c985d9a702771563dc9e Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.191252 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6"] Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.325925 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1e4f1d48-3190-4f87-8052-8c722ca87582-tls-key-pair\") pod \"nmstate-webhook-f8fb84555-tc9qf\" (UID: \"1e4f1d48-3190-4f87-8052-8c722ca87582\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.329886 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/1e4f1d48-3190-4f87-8052-8c722ca87582-tls-key-pair\") pod 
\"nmstate-webhook-f8fb84555-tc9qf\" (UID: \"1e4f1d48-3190-4f87-8052-8c722ca87582\") " pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.334886 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.521567 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq"] Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.546544 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-78df5467d-f5p65"] Jan 04 12:01:07 crc kubenswrapper[5003]: W0104 12:01:07.553373 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19eacfaa_5358_4c9b_9718_1eca4aef583b.slice/crio-23ac0a90bacc096d57c72ee000c7806425cc1fd875467fc0bb3185b931e0a3eb WatchSource:0}: Error finding container 23ac0a90bacc096d57c72ee000c7806425cc1fd875467fc0bb3185b931e0a3eb: Status 404 returned error can't find the container with id 23ac0a90bacc096d57c72ee000c7806425cc1fd875467fc0bb3185b931e0a3eb Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.563733 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf" Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.686928 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-78df5467d-f5p65" event={"ID":"19eacfaa-5358-4c9b-9718-1eca4aef583b","Type":"ContainerStarted","Data":"23ac0a90bacc096d57c72ee000c7806425cc1fd875467fc0bb3185b931e0a3eb"} Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.690911 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6" event={"ID":"1cbd3f4f-d288-4588-a0c4-1e616c0e510a","Type":"ContainerStarted","Data":"9b95c66329a8fb7309427bdbd6090c198ef96f5fad15c985d9a702771563dc9e"} Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.691966 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-v2x7n" event={"ID":"10341446-4c79-4087-8c82-80ffcb35f39a","Type":"ContainerStarted","Data":"2e04d3d612d76fe91a3650bf685ab78042285b4480840e9fb0a06c6e039c61d8"} Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.692715 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq" event={"ID":"86712d7d-38bd-4a51-842d-c168bf155f04","Type":"ContainerStarted","Data":"558a3a522bf205afd4d0a7bf1a39e142ced208fa345fd3852df86777fb5b3689"} Jan 04 12:01:07 crc kubenswrapper[5003]: I0104 12:01:07.783499 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf"] Jan 04 12:01:08 crc kubenswrapper[5003]: I0104 12:01:08.705082 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf" event={"ID":"1e4f1d48-3190-4f87-8052-8c722ca87582","Type":"ContainerStarted","Data":"81dea7ffda632f4c62509709bc5debfbc0fb583d8c530b0623a3f29ee1ed93d5"} Jan 04 12:01:08 crc kubenswrapper[5003]: I0104 12:01:08.707407 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-78df5467d-f5p65" event={"ID":"19eacfaa-5358-4c9b-9718-1eca4aef583b","Type":"ContainerStarted","Data":"5f874facc7fa54c910de0312a26055c6b17b0b2490faead74da15b55fd521617"} Jan 04 12:01:08 crc 
kubenswrapper[5003]: I0104 12:01:08.739129 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-78df5467d-f5p65" podStartSLOduration=2.739098394 podStartE2EDuration="2.739098394s" podCreationTimestamp="2026-01-04 12:01:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:01:08.739089644 +0000 UTC m=+784.212119495" watchObservedRunningTime="2026-01-04 12:01:08.739098394 +0000 UTC m=+784.212128285" Jan 04 12:01:09 crc kubenswrapper[5003]: I0104 12:01:09.419392 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:01:09 crc kubenswrapper[5003]: I0104 12:01:09.419578 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:01:09 crc kubenswrapper[5003]: I0104 12:01:09.419642 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 12:01:09 crc kubenswrapper[5003]: I0104 12:01:09.420459 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"544543b77ddff68504c56117c730883967d4ef6eb8006a6d7bde181f583bbabc"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:01:09 crc kubenswrapper[5003]: I0104 12:01:09.420520 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://544543b77ddff68504c56117c730883967d4ef6eb8006a6d7bde181f583bbabc" gracePeriod=600 Jan 04 12:01:09 crc kubenswrapper[5003]: I0104 12:01:09.719877 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="544543b77ddff68504c56117c730883967d4ef6eb8006a6d7bde181f583bbabc" exitCode=0 Jan 04 12:01:09 crc kubenswrapper[5003]: I0104 12:01:09.719946 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"544543b77ddff68504c56117c730883967d4ef6eb8006a6d7bde181f583bbabc"} Jan 04 12:01:09 crc kubenswrapper[5003]: I0104 12:01:09.720391 5003 scope.go:117] "RemoveContainer" containerID="e6d83c13c4c3536bde3eb2f53b4fd3ead25d7f9dc8d9bc8c2b42265b335adda3" Jan 04 12:01:10 crc kubenswrapper[5003]: I0104 12:01:10.730193 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"4118d04e178a916ef0fb795859c3a8da20b43a18e967d67161d5ece95b07366c"} Jan 04 12:01:10 crc kubenswrapper[5003]: I0104 12:01:10.734634 5003 kubelet.go:2453] "SyncLoop (PLEG): event for 
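The machine-config-daemon entries above show a liveness probe failing with connection refused against http://127.0.0.1:8798/health, after which kubelet logs "failed liveness probe, will be restarted", kills the container with gracePeriod=600, and a new container starts a second later. A simplified Go sketch of what such an HTTP probe amounts to; the endpoint and port come from the log, while the one-second timeout and the status-code handling are assumptions:

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs one HTTP GET; any transport error or non-2xx/3xx status
// counts as a failure, which kubelet would translate into a restart.
func probe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	}
}
```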
pod" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq" event={"ID":"86712d7d-38bd-4a51-842d-c168bf155f04","Type":"ContainerStarted","Data":"d12ba90c036bb5c25ddc89811c20d396c25cc3e3e8a034b86f8ea59fbaa69a5c"} Jan 04 12:01:10 crc kubenswrapper[5003]: I0104 12:01:10.737680 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf" event={"ID":"1e4f1d48-3190-4f87-8052-8c722ca87582","Type":"ContainerStarted","Data":"68d30ff190c7c165d183321f0491428000dcd57dbe218575e4fe60f80843189d"} Jan 04 12:01:10 crc kubenswrapper[5003]: I0104 12:01:10.737865 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf" Jan 04 12:01:10 crc kubenswrapper[5003]: I0104 12:01:10.739528 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6" event={"ID":"1cbd3f4f-d288-4588-a0c4-1e616c0e510a","Type":"ContainerStarted","Data":"2eb9f04fca4e088a259b3aff90bb10654b87d3f413afdbd91ad12005ce751843"} Jan 04 12:01:10 crc kubenswrapper[5003]: I0104 12:01:10.773976 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf" podStartSLOduration=2.127283761 podStartE2EDuration="4.773951132s" podCreationTimestamp="2026-01-04 12:01:06 +0000 UTC" firstStartedPulling="2026-01-04 12:01:07.818150104 +0000 UTC m=+783.291179945" lastFinishedPulling="2026-01-04 12:01:10.464817465 +0000 UTC m=+785.937847316" observedRunningTime="2026-01-04 12:01:10.771920488 +0000 UTC m=+786.244950319" watchObservedRunningTime="2026-01-04 12:01:10.773951132 +0000 UTC m=+786.246980973" Jan 04 12:01:10 crc kubenswrapper[5003]: I0104 12:01:10.792641 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6ff7998486-zxckq" podStartSLOduration=1.937366784 podStartE2EDuration="4.79262544s" podCreationTimestamp="2026-01-04 12:01:06 +0000 UTC" firstStartedPulling="2026-01-04 12:01:07.536332035 +0000 UTC m=+783.009361876" lastFinishedPulling="2026-01-04 12:01:10.391590671 +0000 UTC m=+785.864620532" observedRunningTime="2026-01-04 12:01:10.785576102 +0000 UTC m=+786.258605943" watchObservedRunningTime="2026-01-04 12:01:10.79262544 +0000 UTC m=+786.265655281" Jan 04 12:01:11 crc kubenswrapper[5003]: I0104 12:01:11.747594 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-v2x7n" event={"ID":"10341446-4c79-4087-8c82-80ffcb35f39a","Type":"ContainerStarted","Data":"c68b95c968ca7c170986d69f289b0c32ab33316e9b98cc541645d210cb402d91"} Jan 04 12:01:11 crc kubenswrapper[5003]: I0104 12:01:11.777265 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-v2x7n" podStartSLOduration=2.412305946 podStartE2EDuration="5.777227799s" podCreationTimestamp="2026-01-04 12:01:06 +0000 UTC" firstStartedPulling="2026-01-04 12:01:07.041289808 +0000 UTC m=+782.514319639" lastFinishedPulling="2026-01-04 12:01:10.406211651 +0000 UTC m=+785.879241492" observedRunningTime="2026-01-04 12:01:11.767404757 +0000 UTC m=+787.240434668" watchObservedRunningTime="2026-01-04 12:01:11.777227799 +0000 UTC m=+787.250257680" Jan 04 12:01:11 crc kubenswrapper[5003]: I0104 12:01:11.990063 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-v2x7n" Jan 04 12:01:13 crc kubenswrapper[5003]: I0104 12:01:13.767505 5003 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6" event={"ID":"1cbd3f4f-d288-4588-a0c4-1e616c0e510a","Type":"ContainerStarted","Data":"2daf87085d72ef6a9daad8eaf0572a54d6f29c76dd61a6e7b71df1f84ea0f80a"} Jan 04 12:01:17 crc kubenswrapper[5003]: I0104 12:01:17.033183 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-v2x7n" Jan 04 12:01:17 crc kubenswrapper[5003]: I0104 12:01:17.062758 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f7f7578db-fldh6" podStartSLOduration=5.04301124 podStartE2EDuration="11.062733592s" podCreationTimestamp="2026-01-04 12:01:06 +0000 UTC" firstStartedPulling="2026-01-04 12:01:07.192630945 +0000 UTC m=+782.665660786" lastFinishedPulling="2026-01-04 12:01:13.212353287 +0000 UTC m=+788.685383138" observedRunningTime="2026-01-04 12:01:13.803428717 +0000 UTC m=+789.276458578" watchObservedRunningTime="2026-01-04 12:01:17.062733592 +0000 UTC m=+792.535763443" Jan 04 12:01:17 crc kubenswrapper[5003]: I0104 12:01:17.336040 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:17 crc kubenswrapper[5003]: I0104 12:01:17.336096 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:17 crc kubenswrapper[5003]: I0104 12:01:17.343555 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:17 crc kubenswrapper[5003]: I0104 12:01:17.800814 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-78df5467d-f5p65" Jan 04 12:01:17 crc kubenswrapper[5003]: I0104 12:01:17.873652 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-2jt99"] Jan 04 12:01:27 crc kubenswrapper[5003]: I0104 12:01:27.574351 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-f8fb84555-tc9qf" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.523055 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s"] Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.524931 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.527573 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.576198 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s"] Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.700358 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.700453 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.700503 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkzjg\" (UniqueName: \"kubernetes.io/projected/afaa573c-f32d-423d-a608-1d5443d9b498-kube-api-access-bkzjg\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.801711 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.801769 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.801804 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkzjg\" (UniqueName: \"kubernetes.io/projected/afaa573c-f32d-423d-a608-1d5443d9b498-kube-api-access-bkzjg\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.802456 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-util\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.802660 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-bundle\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.829771 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkzjg\" (UniqueName: \"kubernetes.io/projected/afaa573c-f32d-423d-a608-1d5443d9b498-kube-api-access-bkzjg\") pod \"5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.881844 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:42 crc kubenswrapper[5003]: I0104 12:01:42.930699 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-2jt99" podUID="b43f39ef-60cd-44be-8061-715fbf71a36b" containerName="console" containerID="cri-o://63c8ee81a5000ef78c9c00b022f2b4b04b7a06dac08763d1177201e2bd0ab263" gracePeriod=15 Jan 04 12:01:43 crc kubenswrapper[5003]: I0104 12:01:43.103751 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s"] Jan 04 12:01:43 crc kubenswrapper[5003]: W0104 12:01:43.107440 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafaa573c_f32d_423d_a608_1d5443d9b498.slice/crio-110ee0acd9a843a97393b0ec9c1ac6653b0ab1ea3593becb712f07c28b8ddf1c WatchSource:0}: Error finding container 110ee0acd9a843a97393b0ec9c1ac6653b0ab1ea3593becb712f07c28b8ddf1c: Status 404 returned error can't find the container with id 110ee0acd9a843a97393b0ec9c1ac6653b0ab1ea3593becb712f07c28b8ddf1c Jan 04 12:01:43 crc kubenswrapper[5003]: I0104 12:01:43.323861 5003 patch_prober.go:28] interesting pod/console-f9d7485db-2jt99 container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Jan 04 12:01:43 crc kubenswrapper[5003]: I0104 12:01:43.324238 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-f9d7485db-2jt99" podUID="b43f39ef-60cd-44be-8061-715fbf71a36b" containerName="console" probeResult="failure" output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" Jan 04 12:01:43 crc kubenswrapper[5003]: I0104 12:01:43.988310 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-2jt99_b43f39ef-60cd-44be-8061-715fbf71a36b/console/0.log" Jan 04 12:01:43 crc kubenswrapper[5003]: I0104 12:01:43.988361 5003 generic.go:334] "Generic 
(PLEG): container finished" podID="b43f39ef-60cd-44be-8061-715fbf71a36b" containerID="63c8ee81a5000ef78c9c00b022f2b4b04b7a06dac08763d1177201e2bd0ab263" exitCode=2 Jan 04 12:01:43 crc kubenswrapper[5003]: I0104 12:01:43.988461 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2jt99" event={"ID":"b43f39ef-60cd-44be-8061-715fbf71a36b","Type":"ContainerDied","Data":"63c8ee81a5000ef78c9c00b022f2b4b04b7a06dac08763d1177201e2bd0ab263"} Jan 04 12:01:43 crc kubenswrapper[5003]: I0104 12:01:43.991191 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" event={"ID":"afaa573c-f32d-423d-a608-1d5443d9b498","Type":"ContainerStarted","Data":"f8a5df705667e44e079a941b4d89ff98bf415448863c53870fc682ae15b74b98"} Jan 04 12:01:43 crc kubenswrapper[5003]: I0104 12:01:43.991273 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" event={"ID":"afaa573c-f32d-423d-a608-1d5443d9b498","Type":"ContainerStarted","Data":"110ee0acd9a843a97393b0ec9c1ac6653b0ab1ea3593becb712f07c28b8ddf1c"} Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.160882 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-2jt99_b43f39ef-60cd-44be-8061-715fbf71a36b/console/0.log" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.160980 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-2jt99" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.322391 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hq88c\" (UniqueName: \"kubernetes.io/projected/b43f39ef-60cd-44be-8061-715fbf71a36b-kube-api-access-hq88c\") pod \"b43f39ef-60cd-44be-8061-715fbf71a36b\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.322432 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-serving-cert\") pod \"b43f39ef-60cd-44be-8061-715fbf71a36b\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.322541 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-oauth-serving-cert\") pod \"b43f39ef-60cd-44be-8061-715fbf71a36b\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.322559 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-trusted-ca-bundle\") pod \"b43f39ef-60cd-44be-8061-715fbf71a36b\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.322616 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-oauth-config\") pod \"b43f39ef-60cd-44be-8061-715fbf71a36b\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.322677 5003 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-console-config\") pod \"b43f39ef-60cd-44be-8061-715fbf71a36b\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.322716 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-service-ca\") pod \"b43f39ef-60cd-44be-8061-715fbf71a36b\" (UID: \"b43f39ef-60cd-44be-8061-715fbf71a36b\") " Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.324224 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-service-ca" (OuterVolumeSpecName: "service-ca") pod "b43f39ef-60cd-44be-8061-715fbf71a36b" (UID: "b43f39ef-60cd-44be-8061-715fbf71a36b"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.324285 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-console-config" (OuterVolumeSpecName: "console-config") pod "b43f39ef-60cd-44be-8061-715fbf71a36b" (UID: "b43f39ef-60cd-44be-8061-715fbf71a36b"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.324390 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "b43f39ef-60cd-44be-8061-715fbf71a36b" (UID: "b43f39ef-60cd-44be-8061-715fbf71a36b"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.324410 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "b43f39ef-60cd-44be-8061-715fbf71a36b" (UID: "b43f39ef-60cd-44be-8061-715fbf71a36b"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.332371 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "b43f39ef-60cd-44be-8061-715fbf71a36b" (UID: "b43f39ef-60cd-44be-8061-715fbf71a36b"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.332430 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b43f39ef-60cd-44be-8061-715fbf71a36b-kube-api-access-hq88c" (OuterVolumeSpecName: "kube-api-access-hq88c") pod "b43f39ef-60cd-44be-8061-715fbf71a36b" (UID: "b43f39ef-60cd-44be-8061-715fbf71a36b"). InnerVolumeSpecName "kube-api-access-hq88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.332943 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "b43f39ef-60cd-44be-8061-715fbf71a36b" (UID: "b43f39ef-60cd-44be-8061-715fbf71a36b"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.424155 5003 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.424209 5003 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.424226 5003 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.424241 5003 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-console-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.424258 5003 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b43f39ef-60cd-44be-8061-715fbf71a36b-service-ca\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.424275 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hq88c\" (UniqueName: \"kubernetes.io/projected/b43f39ef-60cd-44be-8061-715fbf71a36b-kube-api-access-hq88c\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:44 crc kubenswrapper[5003]: I0104 12:01:44.424293 5003 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b43f39ef-60cd-44be-8061-715fbf71a36b-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:45 crc kubenswrapper[5003]: I0104 12:01:45.003624 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-2jt99_b43f39ef-60cd-44be-8061-715fbf71a36b/console/0.log" Jan 04 12:01:45 crc kubenswrapper[5003]: I0104 12:01:45.003834 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-2jt99" event={"ID":"b43f39ef-60cd-44be-8061-715fbf71a36b","Type":"ContainerDied","Data":"cfbc6c1c042653e271a7896a1a8776e0c225c047ce28af14de1dbeb53ef506a5"} Jan 04 12:01:45 crc kubenswrapper[5003]: I0104 12:01:45.003873 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-2jt99" Jan 04 12:01:45 crc kubenswrapper[5003]: I0104 12:01:45.003920 5003 scope.go:117] "RemoveContainer" containerID="63c8ee81a5000ef78c9c00b022f2b4b04b7a06dac08763d1177201e2bd0ab263" Jan 04 12:01:45 crc kubenswrapper[5003]: I0104 12:01:45.011971 5003 generic.go:334] "Generic (PLEG): container finished" podID="afaa573c-f32d-423d-a608-1d5443d9b498" containerID="f8a5df705667e44e079a941b4d89ff98bf415448863c53870fc682ae15b74b98" exitCode=0 Jan 04 12:01:45 crc kubenswrapper[5003]: I0104 12:01:45.012055 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" event={"ID":"afaa573c-f32d-423d-a608-1d5443d9b498","Type":"ContainerDied","Data":"f8a5df705667e44e079a941b4d89ff98bf415448863c53870fc682ae15b74b98"} Jan 04 12:01:45 crc kubenswrapper[5003]: I0104 12:01:45.032184 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-2jt99"] Jan 04 12:01:45 crc kubenswrapper[5003]: I0104 12:01:45.036618 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-2jt99"] Jan 04 12:01:46 crc kubenswrapper[5003]: I0104 12:01:46.022473 5003 generic.go:334] "Generic (PLEG): container finished" podID="afaa573c-f32d-423d-a608-1d5443d9b498" containerID="b852fd9ca069c51f10c77ae7e158bedd363ca0e2f441650bbe60a269caa9e228" exitCode=0 Jan 04 12:01:46 crc kubenswrapper[5003]: I0104 12:01:46.022567 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" event={"ID":"afaa573c-f32d-423d-a608-1d5443d9b498","Type":"ContainerDied","Data":"b852fd9ca069c51f10c77ae7e158bedd363ca0e2f441650bbe60a269caa9e228"} Jan 04 12:01:46 crc kubenswrapper[5003]: I0104 12:01:46.816153 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b43f39ef-60cd-44be-8061-715fbf71a36b" path="/var/lib/kubelet/pods/b43f39ef-60cd-44be-8061-715fbf71a36b/volumes" Jan 04 12:01:47 crc kubenswrapper[5003]: I0104 12:01:47.034708 5003 generic.go:334] "Generic (PLEG): container finished" podID="afaa573c-f32d-423d-a608-1d5443d9b498" containerID="ad5afdbed3159c4c289a29d29fb4392a8718f237a1a57477ed7c8508a0e14c21" exitCode=0 Jan 04 12:01:47 crc kubenswrapper[5003]: I0104 12:01:47.034815 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" event={"ID":"afaa573c-f32d-423d-a608-1d5443d9b498","Type":"ContainerDied","Data":"ad5afdbed3159c4c289a29d29fb4392a8718f237a1a57477ed7c8508a0e14c21"} Jan 04 12:01:48 crc kubenswrapper[5003]: I0104 12:01:48.318182 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:48 crc kubenswrapper[5003]: I0104 12:01:48.487887 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-util\") pod \"afaa573c-f32d-423d-a608-1d5443d9b498\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " Jan 04 12:01:48 crc kubenswrapper[5003]: I0104 12:01:48.488141 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-bundle\") pod \"afaa573c-f32d-423d-a608-1d5443d9b498\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " Jan 04 12:01:48 crc kubenswrapper[5003]: I0104 12:01:48.488288 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkzjg\" (UniqueName: \"kubernetes.io/projected/afaa573c-f32d-423d-a608-1d5443d9b498-kube-api-access-bkzjg\") pod \"afaa573c-f32d-423d-a608-1d5443d9b498\" (UID: \"afaa573c-f32d-423d-a608-1d5443d9b498\") " Jan 04 12:01:48 crc kubenswrapper[5003]: I0104 12:01:48.491133 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-bundle" (OuterVolumeSpecName: "bundle") pod "afaa573c-f32d-423d-a608-1d5443d9b498" (UID: "afaa573c-f32d-423d-a608-1d5443d9b498"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:01:48 crc kubenswrapper[5003]: I0104 12:01:48.497190 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afaa573c-f32d-423d-a608-1d5443d9b498-kube-api-access-bkzjg" (OuterVolumeSpecName: "kube-api-access-bkzjg") pod "afaa573c-f32d-423d-a608-1d5443d9b498" (UID: "afaa573c-f32d-423d-a608-1d5443d9b498"). InnerVolumeSpecName "kube-api-access-bkzjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:01:48 crc kubenswrapper[5003]: I0104 12:01:48.507558 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-util" (OuterVolumeSpecName: "util") pod "afaa573c-f32d-423d-a608-1d5443d9b498" (UID: "afaa573c-f32d-423d-a608-1d5443d9b498"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:01:48 crc kubenswrapper[5003]: I0104 12:01:48.589823 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:48 crc kubenswrapper[5003]: I0104 12:01:48.589871 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkzjg\" (UniqueName: \"kubernetes.io/projected/afaa573c-f32d-423d-a608-1d5443d9b498-kube-api-access-bkzjg\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:48 crc kubenswrapper[5003]: I0104 12:01:48.589886 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/afaa573c-f32d-423d-a608-1d5443d9b498-util\") on node \"crc\" DevicePath \"\"" Jan 04 12:01:49 crc kubenswrapper[5003]: I0104 12:01:49.051801 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" event={"ID":"afaa573c-f32d-423d-a608-1d5443d9b498","Type":"ContainerDied","Data":"110ee0acd9a843a97393b0ec9c1ac6653b0ab1ea3593becb712f07c28b8ddf1c"} Jan 04 12:01:49 crc kubenswrapper[5003]: I0104 12:01:49.052231 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="110ee0acd9a843a97393b0ec9c1ac6653b0ab1ea3593becb712f07c28b8ddf1c" Jan 04 12:01:49 crc kubenswrapper[5003]: I0104 12:01:49.051860 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.914682 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7887cff594-wfltc"] Jan 04 12:01:57 crc kubenswrapper[5003]: E0104 12:01:57.915552 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afaa573c-f32d-423d-a608-1d5443d9b498" containerName="util" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.915568 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="afaa573c-f32d-423d-a608-1d5443d9b498" containerName="util" Jan 04 12:01:57 crc kubenswrapper[5003]: E0104 12:01:57.915587 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afaa573c-f32d-423d-a608-1d5443d9b498" containerName="extract" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.915594 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="afaa573c-f32d-423d-a608-1d5443d9b498" containerName="extract" Jan 04 12:01:57 crc kubenswrapper[5003]: E0104 12:01:57.915609 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afaa573c-f32d-423d-a608-1d5443d9b498" containerName="pull" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.915618 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="afaa573c-f32d-423d-a608-1d5443d9b498" containerName="pull" Jan 04 12:01:57 crc kubenswrapper[5003]: E0104 12:01:57.915823 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b43f39ef-60cd-44be-8061-715fbf71a36b" containerName="console" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.915830 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b43f39ef-60cd-44be-8061-715fbf71a36b" containerName="console" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.915955 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="afaa573c-f32d-423d-a608-1d5443d9b498" containerName="extract" Jan 
04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.915969 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b43f39ef-60cd-44be-8061-715fbf71a36b" containerName="console" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.916482 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.918575 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.920170 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.920411 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.921672 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.922415 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-fv48z" Jan 04 12:01:57 crc kubenswrapper[5003]: I0104 12:01:57.967399 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7887cff594-wfltc"] Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.039270 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93-apiservice-cert\") pod \"metallb-operator-controller-manager-7887cff594-wfltc\" (UID: \"e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93\") " pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.039359 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96rjw\" (UniqueName: \"kubernetes.io/projected/e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93-kube-api-access-96rjw\") pod \"metallb-operator-controller-manager-7887cff594-wfltc\" (UID: \"e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93\") " pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.039384 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93-webhook-cert\") pod \"metallb-operator-controller-manager-7887cff594-wfltc\" (UID: \"e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93\") " pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.140783 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96rjw\" (UniqueName: \"kubernetes.io/projected/e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93-kube-api-access-96rjw\") pod \"metallb-operator-controller-manager-7887cff594-wfltc\" (UID: \"e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93\") " pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.140837 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93-webhook-cert\") pod \"metallb-operator-controller-manager-7887cff594-wfltc\" (UID: \"e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93\") " pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.140884 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93-apiservice-cert\") pod \"metallb-operator-controller-manager-7887cff594-wfltc\" (UID: \"e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93\") " pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.148278 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93-apiservice-cert\") pod \"metallb-operator-controller-manager-7887cff594-wfltc\" (UID: \"e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93\") " pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.152636 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93-webhook-cert\") pod \"metallb-operator-controller-manager-7887cff594-wfltc\" (UID: \"e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93\") " pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.166860 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn"] Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.167960 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.170657 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.170988 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.171211 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-b67x5" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.179886 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96rjw\" (UniqueName: \"kubernetes.io/projected/e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93-kube-api-access-96rjw\") pod \"metallb-operator-controller-manager-7887cff594-wfltc\" (UID: \"e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93\") " pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.181788 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn"] Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.231565 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.343026 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqtb9\" (UniqueName: \"kubernetes.io/projected/8515cdcc-9add-47db-b73e-f72e547a1727-kube-api-access-fqtb9\") pod \"metallb-operator-webhook-server-5c4fdcc4bf-rtzgn\" (UID: \"8515cdcc-9add-47db-b73e-f72e547a1727\") " pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.343382 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8515cdcc-9add-47db-b73e-f72e547a1727-apiservice-cert\") pod \"metallb-operator-webhook-server-5c4fdcc4bf-rtzgn\" (UID: \"8515cdcc-9add-47db-b73e-f72e547a1727\") " pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.343413 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8515cdcc-9add-47db-b73e-f72e547a1727-webhook-cert\") pod \"metallb-operator-webhook-server-5c4fdcc4bf-rtzgn\" (UID: \"8515cdcc-9add-47db-b73e-f72e547a1727\") " pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.444875 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqtb9\" (UniqueName: \"kubernetes.io/projected/8515cdcc-9add-47db-b73e-f72e547a1727-kube-api-access-fqtb9\") pod \"metallb-operator-webhook-server-5c4fdcc4bf-rtzgn\" (UID: \"8515cdcc-9add-47db-b73e-f72e547a1727\") " pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.444940 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8515cdcc-9add-47db-b73e-f72e547a1727-apiservice-cert\") pod \"metallb-operator-webhook-server-5c4fdcc4bf-rtzgn\" (UID: \"8515cdcc-9add-47db-b73e-f72e547a1727\") " pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.444957 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8515cdcc-9add-47db-b73e-f72e547a1727-webhook-cert\") pod \"metallb-operator-webhook-server-5c4fdcc4bf-rtzgn\" (UID: \"8515cdcc-9add-47db-b73e-f72e547a1727\") " pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.451717 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8515cdcc-9add-47db-b73e-f72e547a1727-apiservice-cert\") pod \"metallb-operator-webhook-server-5c4fdcc4bf-rtzgn\" (UID: \"8515cdcc-9add-47db-b73e-f72e547a1727\") " pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.452821 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8515cdcc-9add-47db-b73e-f72e547a1727-webhook-cert\") pod \"metallb-operator-webhook-server-5c4fdcc4bf-rtzgn\" (UID: \"8515cdcc-9add-47db-b73e-f72e547a1727\") " 
pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.471668 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqtb9\" (UniqueName: \"kubernetes.io/projected/8515cdcc-9add-47db-b73e-f72e547a1727-kube-api-access-fqtb9\") pod \"metallb-operator-webhook-server-5c4fdcc4bf-rtzgn\" (UID: \"8515cdcc-9add-47db-b73e-f72e547a1727\") " pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.513821 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.562960 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7887cff594-wfltc"] Jan 04 12:01:58 crc kubenswrapper[5003]: W0104 12:01:58.579382 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0b25671_0ecf_4f1e_a6ba_700b4fe3fb93.slice/crio-eca076e3b134ed2fe7eb97f0e39f7e625add4491c6a430a5ce9ea4be318a9f70 WatchSource:0}: Error finding container eca076e3b134ed2fe7eb97f0e39f7e625add4491c6a430a5ce9ea4be318a9f70: Status 404 returned error can't find the container with id eca076e3b134ed2fe7eb97f0e39f7e625add4491c6a430a5ce9ea4be318a9f70 Jan 04 12:01:58 crc kubenswrapper[5003]: I0104 12:01:58.739903 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn"] Jan 04 12:01:58 crc kubenswrapper[5003]: W0104 12:01:58.747317 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8515cdcc_9add_47db_b73e_f72e547a1727.slice/crio-492d13f29da408f1b4ea920755e597f0f8e17efa5ad3b4b255155048ae07f8bc WatchSource:0}: Error finding container 492d13f29da408f1b4ea920755e597f0f8e17efa5ad3b4b255155048ae07f8bc: Status 404 returned error can't find the container with id 492d13f29da408f1b4ea920755e597f0f8e17efa5ad3b4b255155048ae07f8bc Jan 04 12:01:59 crc kubenswrapper[5003]: I0104 12:01:59.115584 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" event={"ID":"e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93","Type":"ContainerStarted","Data":"eca076e3b134ed2fe7eb97f0e39f7e625add4491c6a430a5ce9ea4be318a9f70"} Jan 04 12:01:59 crc kubenswrapper[5003]: I0104 12:01:59.118093 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" event={"ID":"8515cdcc-9add-47db-b73e-f72e547a1727","Type":"ContainerStarted","Data":"492d13f29da408f1b4ea920755e597f0f8e17efa5ad3b4b255155048ae07f8bc"} Jan 04 12:02:04 crc kubenswrapper[5003]: I0104 12:02:04.152087 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" event={"ID":"8515cdcc-9add-47db-b73e-f72e547a1727","Type":"ContainerStarted","Data":"f8560cf3324117e76a7be6aefeaabdf3830dcc3be3557be221b16ef47d114b46"} Jan 04 12:02:04 crc kubenswrapper[5003]: I0104 12:02:04.152568 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:02:04 crc kubenswrapper[5003]: I0104 12:02:04.154762 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" event={"ID":"e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93","Type":"ContainerStarted","Data":"1b2d44605e4f3c87e8fc582a205d88350d011623d55070eae83fa4e41b5e6232"} Jan 04 12:02:04 crc kubenswrapper[5003]: I0104 12:02:04.154900 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:02:04 crc kubenswrapper[5003]: I0104 12:02:04.172734 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" podStartSLOduration=1.200024356 podStartE2EDuration="6.172693163s" podCreationTimestamp="2026-01-04 12:01:58 +0000 UTC" firstStartedPulling="2026-01-04 12:01:58.750720549 +0000 UTC m=+834.223750390" lastFinishedPulling="2026-01-04 12:02:03.723389356 +0000 UTC m=+839.196419197" observedRunningTime="2026-01-04 12:02:04.169084067 +0000 UTC m=+839.642113918" watchObservedRunningTime="2026-01-04 12:02:04.172693163 +0000 UTC m=+839.645723014" Jan 04 12:02:04 crc kubenswrapper[5003]: I0104 12:02:04.189265 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" podStartSLOduration=2.068140817 podStartE2EDuration="7.189244565s" podCreationTimestamp="2026-01-04 12:01:57 +0000 UTC" firstStartedPulling="2026-01-04 12:01:58.583948719 +0000 UTC m=+834.056978570" lastFinishedPulling="2026-01-04 12:02:03.705052477 +0000 UTC m=+839.178082318" observedRunningTime="2026-01-04 12:02:04.188762342 +0000 UTC m=+839.661792193" watchObservedRunningTime="2026-01-04 12:02:04.189244565 +0000 UTC m=+839.662274406" Jan 04 12:02:18 crc kubenswrapper[5003]: I0104 12:02:18.524398 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5c4fdcc4bf-rtzgn" Jan 04 12:02:38 crc kubenswrapper[5003]: I0104 12:02:38.236167 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7887cff594-wfltc" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.019511 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq"] Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.020710 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.023464 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-mzrk7" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.023852 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.029851 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-sm4fk"] Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.032404 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.033768 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq"] Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.034516 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.034637 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.097153 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-gdkc4"] Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.097967 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104286 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/09bc370d-2ed0-4d5b-a050-22d3b22218ae-frr-startup\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104325 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09bc370d-2ed0-4d5b-a050-22d3b22218ae-metrics-certs\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104347 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-metrics\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104370 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phwxm\" (UniqueName: \"kubernetes.io/projected/09bc370d-2ed0-4d5b-a050-22d3b22218ae-kube-api-access-phwxm\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104388 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-frr-conf\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104405 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2878f402-a6b2-4abd-9362-1a33ba2c7cfa-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-k5rzq\" (UID: \"2878f402-a6b2-4abd-9362-1a33ba2c7cfa\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104438 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7qcz\" (UniqueName: \"kubernetes.io/projected/2878f402-a6b2-4abd-9362-1a33ba2c7cfa-kube-api-access-s7qcz\") pod \"frr-k8s-webhook-server-7784b6fcf-k5rzq\" (UID: 
\"2878f402-a6b2-4abd-9362-1a33ba2c7cfa\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104463 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-frr-sockets\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104495 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-reloader\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104743 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104893 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104929 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.104758 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-sxmmc" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.113229 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5bddd4b946-72lpp"] Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.117561 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.120088 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.131777 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-72lpp"] Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206544 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/428d7c60-fd2b-430e-9a21-e6d1ec480d00-cert\") pod \"controller-5bddd4b946-72lpp\" (UID: \"428d7c60-fd2b-430e-9a21-e6d1ec480d00\") " pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206607 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqkzf\" (UniqueName: \"kubernetes.io/projected/428d7c60-fd2b-430e-9a21-e6d1ec480d00-kube-api-access-rqkzf\") pod \"controller-5bddd4b946-72lpp\" (UID: \"428d7c60-fd2b-430e-9a21-e6d1ec480d00\") " pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206691 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-memberlist\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206717 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/111f1bb7-53d6-4079-a637-f586e03ba8c5-metallb-excludel2\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206779 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/09bc370d-2ed0-4d5b-a050-22d3b22218ae-frr-startup\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206797 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09bc370d-2ed0-4d5b-a050-22d3b22218ae-metrics-certs\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206817 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-metrics\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206836 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-metrics-certs\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206854 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/428d7c60-fd2b-430e-9a21-e6d1ec480d00-metrics-certs\") pod \"controller-5bddd4b946-72lpp\" (UID: \"428d7c60-fd2b-430e-9a21-e6d1ec480d00\") " pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206871 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phwxm\" (UniqueName: \"kubernetes.io/projected/09bc370d-2ed0-4d5b-a050-22d3b22218ae-kube-api-access-phwxm\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206887 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-frr-conf\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206903 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2878f402-a6b2-4abd-9362-1a33ba2c7cfa-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-k5rzq\" (UID: \"2878f402-a6b2-4abd-9362-1a33ba2c7cfa\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206938 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7qcz\" (UniqueName: \"kubernetes.io/projected/2878f402-a6b2-4abd-9362-1a33ba2c7cfa-kube-api-access-s7qcz\") pod \"frr-k8s-webhook-server-7784b6fcf-k5rzq\" (UID: \"2878f402-a6b2-4abd-9362-1a33ba2c7cfa\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206960 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-frr-sockets\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.206979 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gvjm\" (UniqueName: \"kubernetes.io/projected/111f1bb7-53d6-4079-a637-f586e03ba8c5-kube-api-access-5gvjm\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.207005 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-reloader\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.207959 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-reloader\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.207981 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/09bc370d-2ed0-4d5b-a050-22d3b22218ae-frr-startup\") 
pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.208255 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-frr-conf\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.208447 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-metrics\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.209220 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/09bc370d-2ed0-4d5b-a050-22d3b22218ae-frr-sockets\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.216829 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/09bc370d-2ed0-4d5b-a050-22d3b22218ae-metrics-certs\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.220968 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2878f402-a6b2-4abd-9362-1a33ba2c7cfa-cert\") pod \"frr-k8s-webhook-server-7784b6fcf-k5rzq\" (UID: \"2878f402-a6b2-4abd-9362-1a33ba2c7cfa\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.229632 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7qcz\" (UniqueName: \"kubernetes.io/projected/2878f402-a6b2-4abd-9362-1a33ba2c7cfa-kube-api-access-s7qcz\") pod \"frr-k8s-webhook-server-7784b6fcf-k5rzq\" (UID: \"2878f402-a6b2-4abd-9362-1a33ba2c7cfa\") " pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.231261 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phwxm\" (UniqueName: \"kubernetes.io/projected/09bc370d-2ed0-4d5b-a050-22d3b22218ae-kube-api-access-phwxm\") pod \"frr-k8s-sm4fk\" (UID: \"09bc370d-2ed0-4d5b-a050-22d3b22218ae\") " pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.308174 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/428d7c60-fd2b-430e-9a21-e6d1ec480d00-metrics-certs\") pod \"controller-5bddd4b946-72lpp\" (UID: \"428d7c60-fd2b-430e-9a21-e6d1ec480d00\") " pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.308229 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-metrics-certs\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.308275 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-5gvjm\" (UniqueName: \"kubernetes.io/projected/111f1bb7-53d6-4079-a637-f586e03ba8c5-kube-api-access-5gvjm\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.308305 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqkzf\" (UniqueName: \"kubernetes.io/projected/428d7c60-fd2b-430e-9a21-e6d1ec480d00-kube-api-access-rqkzf\") pod \"controller-5bddd4b946-72lpp\" (UID: \"428d7c60-fd2b-430e-9a21-e6d1ec480d00\") " pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.308319 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/428d7c60-fd2b-430e-9a21-e6d1ec480d00-cert\") pod \"controller-5bddd4b946-72lpp\" (UID: \"428d7c60-fd2b-430e-9a21-e6d1ec480d00\") " pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.308351 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-memberlist\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.308370 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/111f1bb7-53d6-4079-a637-f586e03ba8c5-metallb-excludel2\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: E0104 12:02:39.308477 5003 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Jan 04 12:02:39 crc kubenswrapper[5003]: E0104 12:02:39.308568 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-metrics-certs podName:111f1bb7-53d6-4079-a637-f586e03ba8c5 nodeName:}" failed. No retries permitted until 2026-01-04 12:02:39.808548025 +0000 UTC m=+875.281577866 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-metrics-certs") pod "speaker-gdkc4" (UID: "111f1bb7-53d6-4079-a637-f586e03ba8c5") : secret "speaker-certs-secret" not found Jan 04 12:02:39 crc kubenswrapper[5003]: E0104 12:02:39.308710 5003 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 04 12:02:39 crc kubenswrapper[5003]: E0104 12:02:39.308852 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-memberlist podName:111f1bb7-53d6-4079-a637-f586e03ba8c5 nodeName:}" failed. No retries permitted until 2026-01-04 12:02:39.808810382 +0000 UTC m=+875.281840223 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-memberlist") pod "speaker-gdkc4" (UID: "111f1bb7-53d6-4079-a637-f586e03ba8c5") : secret "metallb-memberlist" not found Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.309106 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/111f1bb7-53d6-4079-a637-f586e03ba8c5-metallb-excludel2\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.311560 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/428d7c60-fd2b-430e-9a21-e6d1ec480d00-metrics-certs\") pod \"controller-5bddd4b946-72lpp\" (UID: \"428d7c60-fd2b-430e-9a21-e6d1ec480d00\") " pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.312304 5003 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.323554 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/428d7c60-fd2b-430e-9a21-e6d1ec480d00-cert\") pod \"controller-5bddd4b946-72lpp\" (UID: \"428d7c60-fd2b-430e-9a21-e6d1ec480d00\") " pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.330385 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqkzf\" (UniqueName: \"kubernetes.io/projected/428d7c60-fd2b-430e-9a21-e6d1ec480d00-kube-api-access-rqkzf\") pod \"controller-5bddd4b946-72lpp\" (UID: \"428d7c60-fd2b-430e-9a21-e6d1ec480d00\") " pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.330786 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gvjm\" (UniqueName: \"kubernetes.io/projected/111f1bb7-53d6-4079-a637-f586e03ba8c5-kube-api-access-5gvjm\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.348326 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.355808 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-sm4fk" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.433128 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.583674 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq"] Jan 04 12:02:39 crc kubenswrapper[5003]: W0104 12:02:39.589086 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2878f402_a6b2_4abd_9362_1a33ba2c7cfa.slice/crio-88aad748be3be591c4a22d4afbee53d8b004390909c88b996a79751640f833ac WatchSource:0}: Error finding container 88aad748be3be591c4a22d4afbee53d8b004390909c88b996a79751640f833ac: Status 404 returned error can't find the container with id 88aad748be3be591c4a22d4afbee53d8b004390909c88b996a79751640f833ac Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.655324 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5bddd4b946-72lpp"] Jan 04 12:02:39 crc kubenswrapper[5003]: W0104 12:02:39.660092 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod428d7c60_fd2b_430e_9a21_e6d1ec480d00.slice/crio-59c66492d4bace113a1a69bf9fa531cd42f20a228800a823bd4216641e8ff622 WatchSource:0}: Error finding container 59c66492d4bace113a1a69bf9fa531cd42f20a228800a823bd4216641e8ff622: Status 404 returned error can't find the container with id 59c66492d4bace113a1a69bf9fa531cd42f20a228800a823bd4216641e8ff622 Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.823109 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-memberlist\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: E0104 12:02:39.823269 5003 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 04 12:02:39 crc kubenswrapper[5003]: E0104 12:02:39.823453 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-memberlist podName:111f1bb7-53d6-4079-a637-f586e03ba8c5 nodeName:}" failed. No retries permitted until 2026-01-04 12:02:40.823430173 +0000 UTC m=+876.296460014 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-memberlist") pod "speaker-gdkc4" (UID: "111f1bb7-53d6-4079-a637-f586e03ba8c5") : secret "metallb-memberlist" not found Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.823867 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-metrics-certs\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:39 crc kubenswrapper[5003]: I0104 12:02:39.833885 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-metrics-certs\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:40 crc kubenswrapper[5003]: I0104 12:02:40.389793 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" event={"ID":"2878f402-a6b2-4abd-9362-1a33ba2c7cfa","Type":"ContainerStarted","Data":"88aad748be3be591c4a22d4afbee53d8b004390909c88b996a79751640f833ac"} Jan 04 12:02:40 crc kubenswrapper[5003]: I0104 12:02:40.391844 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sm4fk" event={"ID":"09bc370d-2ed0-4d5b-a050-22d3b22218ae","Type":"ContainerStarted","Data":"9bb26eb87e6cf0f1a5cd462a27a152dcc41ac7023360a24689411ffbae6537e2"} Jan 04 12:02:40 crc kubenswrapper[5003]: I0104 12:02:40.394345 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-72lpp" event={"ID":"428d7c60-fd2b-430e-9a21-e6d1ec480d00","Type":"ContainerStarted","Data":"bebabaafa051d3f96b938cfac7706e6ccced96cab84c4ce5d2c8555415d98030"} Jan 04 12:02:40 crc kubenswrapper[5003]: I0104 12:02:40.394377 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-72lpp" event={"ID":"428d7c60-fd2b-430e-9a21-e6d1ec480d00","Type":"ContainerStarted","Data":"ac3ee83f4a0b5d76db31d83a68df6fcf56a583404e8d6637097e3f5b9de88519"} Jan 04 12:02:40 crc kubenswrapper[5003]: I0104 12:02:40.394391 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5bddd4b946-72lpp" event={"ID":"428d7c60-fd2b-430e-9a21-e6d1ec480d00","Type":"ContainerStarted","Data":"59c66492d4bace113a1a69bf9fa531cd42f20a228800a823bd4216641e8ff622"} Jan 04 12:02:40 crc kubenswrapper[5003]: I0104 12:02:40.394553 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5bddd4b946-72lpp" Jan 04 12:02:40 crc kubenswrapper[5003]: I0104 12:02:40.416572 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5bddd4b946-72lpp" podStartSLOduration=1.416554909 podStartE2EDuration="1.416554909s" podCreationTimestamp="2026-01-04 12:02:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:02:40.412341767 +0000 UTC m=+875.885371608" watchObservedRunningTime="2026-01-04 12:02:40.416554909 +0000 UTC m=+875.889584750" Jan 04 12:02:40 crc kubenswrapper[5003]: I0104 12:02:40.839841 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-memberlist\") pod 
\"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:40 crc kubenswrapper[5003]: I0104 12:02:40.858006 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/111f1bb7-53d6-4079-a637-f586e03ba8c5-memberlist\") pod \"speaker-gdkc4\" (UID: \"111f1bb7-53d6-4079-a637-f586e03ba8c5\") " pod="metallb-system/speaker-gdkc4" Jan 04 12:02:40 crc kubenswrapper[5003]: I0104 12:02:40.919001 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-gdkc4" Jan 04 12:02:41 crc kubenswrapper[5003]: I0104 12:02:41.410456 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gdkc4" event={"ID":"111f1bb7-53d6-4079-a637-f586e03ba8c5","Type":"ContainerStarted","Data":"1cd90344d0d611eb9ccbcfb7f7d897cec5d4f96e308bf15eee87c24cc9225f27"} Jan 04 12:02:41 crc kubenswrapper[5003]: I0104 12:02:41.411409 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gdkc4" event={"ID":"111f1bb7-53d6-4079-a637-f586e03ba8c5","Type":"ContainerStarted","Data":"282ef5984fcbb828662df37a2e97cc174b9e4ad611f2e74cacc306c7d5e6c06b"} Jan 04 12:02:42 crc kubenswrapper[5003]: I0104 12:02:42.428094 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gdkc4" event={"ID":"111f1bb7-53d6-4079-a637-f586e03ba8c5","Type":"ContainerStarted","Data":"433c24d2798e65b12b530a0717efb71b69f81f719013fb7b362108049c240a8b"} Jan 04 12:02:42 crc kubenswrapper[5003]: I0104 12:02:42.428273 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-gdkc4" Jan 04 12:02:42 crc kubenswrapper[5003]: I0104 12:02:42.454384 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-gdkc4" podStartSLOduration=3.454362921 podStartE2EDuration="3.454362921s" podCreationTimestamp="2026-01-04 12:02:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:02:42.44908772 +0000 UTC m=+877.922117581" watchObservedRunningTime="2026-01-04 12:02:42.454362921 +0000 UTC m=+877.927392762" Jan 04 12:02:48 crc kubenswrapper[5003]: I0104 12:02:48.516961 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" event={"ID":"2878f402-a6b2-4abd-9362-1a33ba2c7cfa","Type":"ContainerStarted","Data":"a8595698aac26d505e83535db57e3d3219df9eb351f6f74db39e07b9d5a13546"} Jan 04 12:02:48 crc kubenswrapper[5003]: I0104 12:02:48.517518 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" Jan 04 12:02:48 crc kubenswrapper[5003]: I0104 12:02:48.519901 5003 generic.go:334] "Generic (PLEG): container finished" podID="09bc370d-2ed0-4d5b-a050-22d3b22218ae" containerID="94451f806b94de8243c03078b0a3632a64c5e16242759f1af6270f0cad7676c1" exitCode=0 Jan 04 12:02:48 crc kubenswrapper[5003]: I0104 12:02:48.519956 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sm4fk" event={"ID":"09bc370d-2ed0-4d5b-a050-22d3b22218ae","Type":"ContainerDied","Data":"94451f806b94de8243c03078b0a3632a64c5e16242759f1af6270f0cad7676c1"} Jan 04 12:02:48 crc kubenswrapper[5003]: I0104 12:02:48.538924 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq" 
podStartSLOduration=2.314863078 podStartE2EDuration="10.538904152s" podCreationTimestamp="2026-01-04 12:02:38 +0000 UTC" firstStartedPulling="2026-01-04 12:02:39.592829867 +0000 UTC m=+875.065859708" lastFinishedPulling="2026-01-04 12:02:47.816870941 +0000 UTC m=+883.289900782" observedRunningTime="2026-01-04 12:02:48.536353964 +0000 UTC m=+884.009383875" watchObservedRunningTime="2026-01-04 12:02:48.538904152 +0000 UTC m=+884.011933993"
Jan 04 12:02:49 crc kubenswrapper[5003]: I0104 12:02:49.439853 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5bddd4b946-72lpp"
Jan 04 12:02:49 crc kubenswrapper[5003]: I0104 12:02:49.532130 5003 generic.go:334] "Generic (PLEG): container finished" podID="09bc370d-2ed0-4d5b-a050-22d3b22218ae" containerID="6d44c60efb22a7d138507f0423976b80b828b8bfb35072567b3505c05e0b21a2" exitCode=0
Jan 04 12:02:49 crc kubenswrapper[5003]: I0104 12:02:49.532233 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sm4fk" event={"ID":"09bc370d-2ed0-4d5b-a050-22d3b22218ae","Type":"ContainerDied","Data":"6d44c60efb22a7d138507f0423976b80b828b8bfb35072567b3505c05e0b21a2"}
Jan 04 12:02:50 crc kubenswrapper[5003]: I0104 12:02:50.545494 5003 generic.go:334] "Generic (PLEG): container finished" podID="09bc370d-2ed0-4d5b-a050-22d3b22218ae" containerID="6594ec35a0546a66d7ed834df8ddd29b9c2159f4b85c4d546a2c16835d64724f" exitCode=0
Jan 04 12:02:50 crc kubenswrapper[5003]: I0104 12:02:50.545588 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sm4fk" event={"ID":"09bc370d-2ed0-4d5b-a050-22d3b22218ae","Type":"ContainerDied","Data":"6594ec35a0546a66d7ed834df8ddd29b9c2159f4b85c4d546a2c16835d64724f"}
Jan 04 12:02:51 crc kubenswrapper[5003]: I0104 12:02:51.556795 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sm4fk" event={"ID":"09bc370d-2ed0-4d5b-a050-22d3b22218ae","Type":"ContainerStarted","Data":"f56b6ab49d660f893e0711dde7c0ed026b9fcf4bd2b14c77947d0e141fd8d557"}
Jan 04 12:02:51 crc kubenswrapper[5003]: I0104 12:02:51.557389 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sm4fk" event={"ID":"09bc370d-2ed0-4d5b-a050-22d3b22218ae","Type":"ContainerStarted","Data":"2cfd5914466a52cb4c58f7c1522bfb0735cd0887037169c5bd1b31f3693c3fc1"}
Jan 04 12:02:51 crc kubenswrapper[5003]: I0104 12:02:51.557462 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sm4fk" event={"ID":"09bc370d-2ed0-4d5b-a050-22d3b22218ae","Type":"ContainerStarted","Data":"01c7b03e34c34947afa01f210375bf74032d2fbf0bd5f00c8bb741e5ee3d7375"}
Jan 04 12:02:51 crc kubenswrapper[5003]: I0104 12:02:51.557518 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sm4fk" event={"ID":"09bc370d-2ed0-4d5b-a050-22d3b22218ae","Type":"ContainerStarted","Data":"afe90296890f0ee7d3e83530f141b7c962ebf536b552fdada139439b8258cf5a"}
Jan 04 12:02:51 crc kubenswrapper[5003]: I0104 12:02:51.557615 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sm4fk" event={"ID":"09bc370d-2ed0-4d5b-a050-22d3b22218ae","Type":"ContainerStarted","Data":"0008804856e604d0d18fdc2e4e228eb7ec467bd643d4f18e3b956ed4a55fd4e9"}
Jan 04 12:02:51 crc kubenswrapper[5003]: I0104 12:02:51.557695 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-sm4fk" event={"ID":"09bc370d-2ed0-4d5b-a050-22d3b22218ae","Type":"ContainerStarted","Data":"49aacd372ddc05135f35f8382ef811774d93d951865564cc325ba663467d3dc3"}
Jan 04 12:02:51 crc kubenswrapper[5003]: I0104 12:02:51.557774 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-sm4fk"
Jan 04 12:02:51 crc kubenswrapper[5003]: I0104 12:02:51.581755 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-sm4fk" podStartSLOduration=5.284624855 podStartE2EDuration="13.581736081s" podCreationTimestamp="2026-01-04 12:02:38 +0000 UTC" firstStartedPulling="2026-01-04 12:02:39.511502996 +0000 UTC m=+874.984532837" lastFinishedPulling="2026-01-04 12:02:47.808614202 +0000 UTC m=+883.281644063" observedRunningTime="2026-01-04 12:02:51.580088188 +0000 UTC m=+887.053118029" watchObservedRunningTime="2026-01-04 12:02:51.581736081 +0000 UTC m=+887.054765932"
Jan 04 12:02:54 crc kubenswrapper[5003]: I0104 12:02:54.356396 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-sm4fk"
Jan 04 12:02:54 crc kubenswrapper[5003]: I0104 12:02:54.411141 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-sm4fk"
Jan 04 12:02:59 crc kubenswrapper[5003]: I0104 12:02:59.357702 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7784b6fcf-k5rzq"
Jan 04 12:03:00 crc kubenswrapper[5003]: I0104 12:03:00.925758 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-gdkc4"
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.714944 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"]
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.717643 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.720271 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.731632 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"]
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.777779 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkpng\" (UniqueName: \"kubernetes.io/projected/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-kube-api-access-bkpng\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.777948 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.778120 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.879299 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkpng\" (UniqueName: \"kubernetes.io/projected/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-kube-api-access-bkpng\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.879364 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.879420 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.879863 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.880158 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:02 crc kubenswrapper[5003]: I0104 12:03:02.909390 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkpng\" (UniqueName: \"kubernetes.io/projected/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-kube-api-access-bkpng\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:03 crc kubenswrapper[5003]: I0104 12:03:03.045988 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:03 crc kubenswrapper[5003]: W0104 12:03:03.514589 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc70c4c5a_820d_4671_a4a8_dae25ff5f3f3.slice/crio-22ac86aef7867ab624edb87d25db601214d71456e8ce090c81ab01113b17a0d3 WatchSource:0}: Error finding container 22ac86aef7867ab624edb87d25db601214d71456e8ce090c81ab01113b17a0d3: Status 404 returned error can't find the container with id 22ac86aef7867ab624edb87d25db601214d71456e8ce090c81ab01113b17a0d3
Jan 04 12:03:03 crc kubenswrapper[5003]: I0104 12:03:03.517162 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"]
Jan 04 12:03:03 crc kubenswrapper[5003]: I0104 12:03:03.655826 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8" event={"ID":"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3","Type":"ContainerStarted","Data":"22ac86aef7867ab624edb87d25db601214d71456e8ce090c81ab01113b17a0d3"}
Jan 04 12:03:04 crc kubenswrapper[5003]: I0104 12:03:04.671209 5003 generic.go:334] "Generic (PLEG): container finished" podID="c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" containerID="a5bc3fe75795d8e051203a07adaea16c9ccd8e51e8f51d8c2ed013b0f9452ab9" exitCode=0
Jan 04 12:03:04 crc kubenswrapper[5003]: I0104 12:03:04.671270 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8" event={"ID":"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3","Type":"ContainerDied","Data":"a5bc3fe75795d8e051203a07adaea16c9ccd8e51e8f51d8c2ed013b0f9452ab9"}
Jan 04 12:03:08 crc kubenswrapper[5003]: I0104 12:03:08.719306 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8" event={"ID":"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3","Type":"ContainerStarted","Data":"6b72af09b23c2c7cee7728cc6823fb6c27002e20e338507b83ade20c06677fe6"}
Jan 04 12:03:09 crc kubenswrapper[5003]: I0104 12:03:09.366847 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-sm4fk"
Jan 04 12:03:09 crc kubenswrapper[5003]: I0104 12:03:09.763224 5003 generic.go:334] "Generic (PLEG): container finished" podID="c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" containerID="6b72af09b23c2c7cee7728cc6823fb6c27002e20e338507b83ade20c06677fe6" exitCode=0
Jan 04 12:03:09 crc kubenswrapper[5003]: I0104 12:03:09.763275 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8" event={"ID":"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3","Type":"ContainerDied","Data":"6b72af09b23c2c7cee7728cc6823fb6c27002e20e338507b83ade20c06677fe6"}
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.651739 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-882nr"]
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.653588 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.669694 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-882nr"]
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.690999 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-catalog-content\") pod \"redhat-marketplace-882nr\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") " pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.691149 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrl64\" (UniqueName: \"kubernetes.io/projected/8f244a26-2db3-4873-a3c4-11677f61d56b-kube-api-access-qrl64\") pod \"redhat-marketplace-882nr\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") " pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.691198 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-utilities\") pod \"redhat-marketplace-882nr\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") " pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.772626 5003 generic.go:334] "Generic (PLEG): container finished" podID="c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" containerID="0c5c1ba0e2fd996c604857e86b55480abff9352efc0d66cc575b503aa5a6df32" exitCode=0
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.772690 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8" event={"ID":"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3","Type":"ContainerDied","Data":"0c5c1ba0e2fd996c604857e86b55480abff9352efc0d66cc575b503aa5a6df32"}
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.792347 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrl64\" (UniqueName: \"kubernetes.io/projected/8f244a26-2db3-4873-a3c4-11677f61d56b-kube-api-access-qrl64\") pod \"redhat-marketplace-882nr\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") " pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.792439 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-utilities\") pod \"redhat-marketplace-882nr\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") " pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.792575 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-catalog-content\") pod \"redhat-marketplace-882nr\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") " pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.793163 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-utilities\") pod \"redhat-marketplace-882nr\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") " pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.793275 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-catalog-content\") pod \"redhat-marketplace-882nr\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") " pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.816267 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrl64\" (UniqueName: \"kubernetes.io/projected/8f244a26-2db3-4873-a3c4-11677f61d56b-kube-api-access-qrl64\") pod \"redhat-marketplace-882nr\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") " pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:10 crc kubenswrapper[5003]: I0104 12:03:10.972117 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:11 crc kubenswrapper[5003]: I0104 12:03:11.376038 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-882nr"]
Jan 04 12:03:11 crc kubenswrapper[5003]: I0104 12:03:11.780464 5003 generic.go:334] "Generic (PLEG): container finished" podID="8f244a26-2db3-4873-a3c4-11677f61d56b" containerID="c21b42d5bddf1bfa59c2b10b688c09cb219ba7b2589b0fbd4d342674eef67148" exitCode=0
Jan 04 12:03:11 crc kubenswrapper[5003]: I0104 12:03:11.780555 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-882nr" event={"ID":"8f244a26-2db3-4873-a3c4-11677f61d56b","Type":"ContainerDied","Data":"c21b42d5bddf1bfa59c2b10b688c09cb219ba7b2589b0fbd4d342674eef67148"}
Jan 04 12:03:11 crc kubenswrapper[5003]: I0104 12:03:11.780811 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-882nr" event={"ID":"8f244a26-2db3-4873-a3c4-11677f61d56b","Type":"ContainerStarted","Data":"49dcfa7e3925817c96f5d2843aa667afa535ebf2954ef18063bf9cd7f7c9ce0c"}
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.032546 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.111532 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkpng\" (UniqueName: \"kubernetes.io/projected/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-kube-api-access-bkpng\") pod \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") "
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.111615 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-bundle\") pod \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") "
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.111717 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-util\") pod \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\" (UID: \"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3\") "
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.112786 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-bundle" (OuterVolumeSpecName: "bundle") pod "c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" (UID: "c70c4c5a-820d-4671-a4a8-dae25ff5f3f3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.117489 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-kube-api-access-bkpng" (OuterVolumeSpecName: "kube-api-access-bkpng") pod "c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" (UID: "c70c4c5a-820d-4671-a4a8-dae25ff5f3f3"). InnerVolumeSpecName "kube-api-access-bkpng". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.121283 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-util" (OuterVolumeSpecName: "util") pod "c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" (UID: "c70c4c5a-820d-4671-a4a8-dae25ff5f3f3"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.213591 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkpng\" (UniqueName: \"kubernetes.io/projected/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-kube-api-access-bkpng\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.213666 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.213681 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c70c4c5a-820d-4671-a4a8-dae25ff5f3f3-util\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.787539 5003 generic.go:334] "Generic (PLEG): container finished" podID="8f244a26-2db3-4873-a3c4-11677f61d56b" containerID="54a33bfe65a77a14fff2ff7b325a95d53b15da21927e7aba8dba23f7aa64415f" exitCode=0
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.787671 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-882nr" event={"ID":"8f244a26-2db3-4873-a3c4-11677f61d56b","Type":"ContainerDied","Data":"54a33bfe65a77a14fff2ff7b325a95d53b15da21927e7aba8dba23f7aa64415f"}
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.791653 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8" event={"ID":"c70c4c5a-820d-4671-a4a8-dae25ff5f3f3","Type":"ContainerDied","Data":"22ac86aef7867ab624edb87d25db601214d71456e8ce090c81ab01113b17a0d3"}
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.791705 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22ac86aef7867ab624edb87d25db601214d71456e8ce090c81ab01113b17a0d3"
Jan 04 12:03:12 crc kubenswrapper[5003]: I0104 12:03:12.791718 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8"
Jan 04 12:03:13 crc kubenswrapper[5003]: I0104 12:03:13.801357 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-882nr" event={"ID":"8f244a26-2db3-4873-a3c4-11677f61d56b","Type":"ContainerStarted","Data":"269fa45610f3de03a5f328f7025769f71a1e15d63595bbc02d675e3ba7e9c370"}
Jan 04 12:03:13 crc kubenswrapper[5003]: I0104 12:03:13.820663 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-882nr" podStartSLOduration=2.361412932 podStartE2EDuration="3.820639345s" podCreationTimestamp="2026-01-04 12:03:10 +0000 UTC" firstStartedPulling="2026-01-04 12:03:11.782313448 +0000 UTC m=+907.255343289" lastFinishedPulling="2026-01-04 12:03:13.241539861 +0000 UTC m=+908.714569702" observedRunningTime="2026-01-04 12:03:13.817398729 +0000 UTC m=+909.290428580" watchObservedRunningTime="2026-01-04 12:03:13.820639345 +0000 UTC m=+909.293669196"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.054566 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-p5b46"]
Jan 04 12:03:19 crc kubenswrapper[5003]: E0104 12:03:19.055658 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" containerName="extract"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.055676 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" containerName="extract"
Jan 04 12:03:19 crc kubenswrapper[5003]: E0104 12:03:19.055697 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" containerName="util"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.055705 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" containerName="util"
Jan 04 12:03:19 crc kubenswrapper[5003]: E0104 12:03:19.055714 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" containerName="pull"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.055721 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" containerName="pull"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.055855 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c70c4c5a-820d-4671-a4a8-dae25ff5f3f3" containerName="extract"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.056861 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.085001 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p5b46"]
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.108496 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-utilities\") pod \"community-operators-p5b46\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") " pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.116397 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w62lq\" (UniqueName: \"kubernetes.io/projected/2a380299-3d27-416d-9615-d3344069b769-kube-api-access-w62lq\") pod \"community-operators-p5b46\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") " pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.116608 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-catalog-content\") pod \"community-operators-p5b46\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") " pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.218021 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-catalog-content\") pod \"community-operators-p5b46\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") " pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.218141 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-utilities\") pod \"community-operators-p5b46\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") " pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.218189 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w62lq\" (UniqueName: \"kubernetes.io/projected/2a380299-3d27-416d-9615-d3344069b769-kube-api-access-w62lq\") pod \"community-operators-p5b46\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") " pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.219112 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-catalog-content\") pod \"community-operators-p5b46\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") " pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.219404 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-utilities\") pod \"community-operators-p5b46\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") " pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.241838 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w62lq\" (UniqueName: \"kubernetes.io/projected/2a380299-3d27-416d-9615-d3344069b769-kube-api-access-w62lq\") pod \"community-operators-p5b46\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") " pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.388006 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.725191 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p5b46"]
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.839701 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p5b46" event={"ID":"2a380299-3d27-416d-9615-d3344069b769","Type":"ContainerStarted","Data":"10733b352cafeeef92d274b6713254aab154f35c797efd2935a5b63294e739f4"}
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.853141 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l"]
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.854302 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.855857 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.856502 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.860733 5003 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-8pgq4"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.872853 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l"]
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.928181 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lp42x\" (UniqueName: \"kubernetes.io/projected/0b3a9d8d-57e2-4113-8dcc-dbad91249465-kube-api-access-lp42x\") pod \"cert-manager-operator-controller-manager-64cf6dff88-wql6l\" (UID: \"0b3a9d8d-57e2-4113-8dcc-dbad91249465\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l"
Jan 04 12:03:19 crc kubenswrapper[5003]: I0104 12:03:19.928227 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/0b3a9d8d-57e2-4113-8dcc-dbad91249465-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-wql6l\" (UID: \"0b3a9d8d-57e2-4113-8dcc-dbad91249465\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l"
Jan 04 12:03:20 crc kubenswrapper[5003]: I0104 12:03:20.028792 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/0b3a9d8d-57e2-4113-8dcc-dbad91249465-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-wql6l\" (UID: \"0b3a9d8d-57e2-4113-8dcc-dbad91249465\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l"
Jan 04 12:03:20 crc kubenswrapper[5003]: I0104 12:03:20.028911 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lp42x\" (UniqueName: \"kubernetes.io/projected/0b3a9d8d-57e2-4113-8dcc-dbad91249465-kube-api-access-lp42x\") pod \"cert-manager-operator-controller-manager-64cf6dff88-wql6l\" (UID: \"0b3a9d8d-57e2-4113-8dcc-dbad91249465\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l"
Jan 04 12:03:20 crc kubenswrapper[5003]: I0104 12:03:20.029424 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/0b3a9d8d-57e2-4113-8dcc-dbad91249465-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-wql6l\" (UID: \"0b3a9d8d-57e2-4113-8dcc-dbad91249465\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l"
Jan 04 12:03:20 crc kubenswrapper[5003]: I0104 12:03:20.051540 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lp42x\" (UniqueName: \"kubernetes.io/projected/0b3a9d8d-57e2-4113-8dcc-dbad91249465-kube-api-access-lp42x\") pod \"cert-manager-operator-controller-manager-64cf6dff88-wql6l\" (UID: \"0b3a9d8d-57e2-4113-8dcc-dbad91249465\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l"
Jan 04 12:03:20 crc kubenswrapper[5003]: I0104 12:03:20.171503 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l"
Jan 04 12:03:20 crc kubenswrapper[5003]: I0104 12:03:20.673843 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l"]
Jan 04 12:03:20 crc kubenswrapper[5003]: W0104 12:03:20.680322 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b3a9d8d_57e2_4113_8dcc_dbad91249465.slice/crio-fccbe4009f2d700df6115e6e4acd56bead1f2138656ca91daf044f7c8a96df8b WatchSource:0}: Error finding container fccbe4009f2d700df6115e6e4acd56bead1f2138656ca91daf044f7c8a96df8b: Status 404 returned error can't find the container with id fccbe4009f2d700df6115e6e4acd56bead1f2138656ca91daf044f7c8a96df8b
Jan 04 12:03:20 crc kubenswrapper[5003]: I0104 12:03:20.845867 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l" event={"ID":"0b3a9d8d-57e2-4113-8dcc-dbad91249465","Type":"ContainerStarted","Data":"fccbe4009f2d700df6115e6e4acd56bead1f2138656ca91daf044f7c8a96df8b"}
Jan 04 12:03:20 crc kubenswrapper[5003]: I0104 12:03:20.847985 5003 generic.go:334] "Generic (PLEG): container finished" podID="2a380299-3d27-416d-9615-d3344069b769" containerID="f6f3cc33e3a7681fa8641f91e044ae2e6d8a93f4a35b78e99373c00c07aea658" exitCode=0
Jan 04 12:03:20 crc kubenswrapper[5003]: I0104 12:03:20.848067 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p5b46" event={"ID":"2a380299-3d27-416d-9615-d3344069b769","Type":"ContainerDied","Data":"f6f3cc33e3a7681fa8641f91e044ae2e6d8a93f4a35b78e99373c00c07aea658"}
Jan 04 12:03:20 crc kubenswrapper[5003]: I0104 12:03:20.972601 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:20 crc kubenswrapper[5003]: I0104 12:03:20.972659 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:21 crc kubenswrapper[5003]: I0104 12:03:21.028905 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:21 crc kubenswrapper[5003]: I0104 12:03:21.920975 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:22 crc kubenswrapper[5003]: I0104 12:03:22.886533 5003 generic.go:334] "Generic (PLEG): container finished" podID="2a380299-3d27-416d-9615-d3344069b769" containerID="5899f155b3653b554108ccd0d75dfcf1557746002020f59a94e20d5ad45d31ad" exitCode=0
Jan 04 12:03:22 crc kubenswrapper[5003]: I0104 12:03:22.886928 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p5b46" event={"ID":"2a380299-3d27-416d-9615-d3344069b769","Type":"ContainerDied","Data":"5899f155b3653b554108ccd0d75dfcf1557746002020f59a94e20d5ad45d31ad"}
Jan 04 12:03:23 crc kubenswrapper[5003]: I0104 12:03:23.898618 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p5b46" event={"ID":"2a380299-3d27-416d-9615-d3344069b769","Type":"ContainerStarted","Data":"302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99"}
Jan 04 12:03:23 crc kubenswrapper[5003]: I0104 12:03:23.926783 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-p5b46" podStartSLOduration=2.311271036 podStartE2EDuration="4.926760914s" podCreationTimestamp="2026-01-04 12:03:19 +0000 UTC" firstStartedPulling="2026-01-04 12:03:20.850385763 +0000 UTC m=+916.323415604" lastFinishedPulling="2026-01-04 12:03:23.465875641 +0000 UTC m=+918.938905482" observedRunningTime="2026-01-04 12:03:23.91946005 +0000 UTC m=+919.392489881" watchObservedRunningTime="2026-01-04 12:03:23.926760914 +0000 UTC m=+919.399790755"
Jan 04 12:03:25 crc kubenswrapper[5003]: I0104 12:03:25.840387 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-882nr"]
Jan 04 12:03:25 crc kubenswrapper[5003]: I0104 12:03:25.840860 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-882nr" podUID="8f244a26-2db3-4873-a3c4-11677f61d56b" containerName="registry-server" containerID="cri-o://269fa45610f3de03a5f328f7025769f71a1e15d63595bbc02d675e3ba7e9c370" gracePeriod=2
Jan 04 12:03:26 crc kubenswrapper[5003]: I0104 12:03:26.931549 5003 generic.go:334] "Generic (PLEG): container finished" podID="8f244a26-2db3-4873-a3c4-11677f61d56b" containerID="269fa45610f3de03a5f328f7025769f71a1e15d63595bbc02d675e3ba7e9c370" exitCode=0
Jan 04 12:03:26 crc kubenswrapper[5003]: I0104 12:03:26.931958 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-882nr" event={"ID":"8f244a26-2db3-4873-a3c4-11677f61d56b","Type":"ContainerDied","Data":"269fa45610f3de03a5f328f7025769f71a1e15d63595bbc02d675e3ba7e9c370"}
Jan 04 12:03:29 crc kubenswrapper[5003]: I0104 12:03:29.388761 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:29 crc kubenswrapper[5003]: I0104 12:03:29.389347 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:29 crc kubenswrapper[5003]: I0104 12:03:29.449877 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.009908 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.074766 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.177148 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrl64\" (UniqueName: \"kubernetes.io/projected/8f244a26-2db3-4873-a3c4-11677f61d56b-kube-api-access-qrl64\") pod \"8f244a26-2db3-4873-a3c4-11677f61d56b\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") "
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.177237 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-utilities\") pod \"8f244a26-2db3-4873-a3c4-11677f61d56b\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") "
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.177316 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-catalog-content\") pod \"8f244a26-2db3-4873-a3c4-11677f61d56b\" (UID: \"8f244a26-2db3-4873-a3c4-11677f61d56b\") "
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.178153 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-utilities" (OuterVolumeSpecName: "utilities") pod "8f244a26-2db3-4873-a3c4-11677f61d56b" (UID: "8f244a26-2db3-4873-a3c4-11677f61d56b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.184228 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f244a26-2db3-4873-a3c4-11677f61d56b-kube-api-access-qrl64" (OuterVolumeSpecName: "kube-api-access-qrl64") pod "8f244a26-2db3-4873-a3c4-11677f61d56b" (UID: "8f244a26-2db3-4873-a3c4-11677f61d56b"). InnerVolumeSpecName "kube-api-access-qrl64". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.197333 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8f244a26-2db3-4873-a3c4-11677f61d56b" (UID: "8f244a26-2db3-4873-a3c4-11677f61d56b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.279244 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.279290 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8f244a26-2db3-4873-a3c4-11677f61d56b-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.279306 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrl64\" (UniqueName: \"kubernetes.io/projected/8f244a26-2db3-4873-a3c4-11677f61d56b-kube-api-access-qrl64\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.965454 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l" event={"ID":"0b3a9d8d-57e2-4113-8dcc-dbad91249465","Type":"ContainerStarted","Data":"64d3ef958d7f8a842622f5f68574a183470f3aed17d09ad28207b498bedc3e6b"}
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.969730 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-882nr" event={"ID":"8f244a26-2db3-4873-a3c4-11677f61d56b","Type":"ContainerDied","Data":"49dcfa7e3925817c96f5d2843aa667afa535ebf2954ef18063bf9cd7f7c9ce0c"}
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.969751 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-882nr"
Jan 04 12:03:30 crc kubenswrapper[5003]: I0104 12:03:30.969885 5003 scope.go:117] "RemoveContainer" containerID="269fa45610f3de03a5f328f7025769f71a1e15d63595bbc02d675e3ba7e9c370"
Jan 04 12:03:31 crc kubenswrapper[5003]: I0104 12:03:31.015978 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-wql6l" podStartSLOduration=2.802064144 podStartE2EDuration="12.01589566s" podCreationTimestamp="2026-01-04 12:03:19 +0000 UTC" firstStartedPulling="2026-01-04 12:03:20.684348862 +0000 UTC m=+916.157378703" lastFinishedPulling="2026-01-04 12:03:29.898180378 +0000 UTC m=+925.371210219" observedRunningTime="2026-01-04 12:03:31.005666458 +0000 UTC m=+926.478696359" watchObservedRunningTime="2026-01-04 12:03:31.01589566 +0000 UTC m=+926.488925561"
Jan 04 12:03:31 crc kubenswrapper[5003]: I0104 12:03:31.017412 5003 scope.go:117] "RemoveContainer" containerID="54a33bfe65a77a14fff2ff7b325a95d53b15da21927e7aba8dba23f7aa64415f"
Jan 04 12:03:31 crc kubenswrapper[5003]: I0104 12:03:31.064464 5003 scope.go:117] "RemoveContainer" containerID="c21b42d5bddf1bfa59c2b10b688c09cb219ba7b2589b0fbd4d342674eef67148"
Jan 04 12:03:31 crc kubenswrapper[5003]: I0104 12:03:31.080733 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-882nr"]
Jan 04 12:03:31 crc kubenswrapper[5003]: I0104 12:03:31.084051 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-882nr"]
Jan 04 12:03:31 crc kubenswrapper[5003]: I0104 12:03:31.641399 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p5b46"]
Jan 04 12:03:32 crc kubenswrapper[5003]: I0104 12:03:32.024876 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-p5b46" podUID="2a380299-3d27-416d-9615-d3344069b769" containerName="registry-server" containerID="cri-o://302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99" gracePeriod=2
Jan 04 12:03:32 crc kubenswrapper[5003]: I0104 12:03:32.819455 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f244a26-2db3-4873-a3c4-11677f61d56b" path="/var/lib/kubelet/pods/8f244a26-2db3-4873-a3c4-11677f61d56b/volumes"
Jan 04 12:03:32 crc kubenswrapper[5003]: I0104 12:03:32.948845 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.019878 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w62lq\" (UniqueName: \"kubernetes.io/projected/2a380299-3d27-416d-9615-d3344069b769-kube-api-access-w62lq\") pod \"2a380299-3d27-416d-9615-d3344069b769\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") "
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.019966 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-utilities\") pod \"2a380299-3d27-416d-9615-d3344069b769\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") "
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.020076 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-catalog-content\") pod \"2a380299-3d27-416d-9615-d3344069b769\" (UID: \"2a380299-3d27-416d-9615-d3344069b769\") "
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.020892 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-utilities" (OuterVolumeSpecName: "utilities") pod "2a380299-3d27-416d-9615-d3344069b769" (UID: "2a380299-3d27-416d-9615-d3344069b769"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.029333 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a380299-3d27-416d-9615-d3344069b769-kube-api-access-w62lq" (OuterVolumeSpecName: "kube-api-access-w62lq") pod "2a380299-3d27-416d-9615-d3344069b769" (UID: "2a380299-3d27-416d-9615-d3344069b769"). InnerVolumeSpecName "kube-api-access-w62lq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.034891 5003 generic.go:334] "Generic (PLEG): container finished" podID="2a380299-3d27-416d-9615-d3344069b769" containerID="302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99" exitCode=0
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.034962 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p5b46" event={"ID":"2a380299-3d27-416d-9615-d3344069b769","Type":"ContainerDied","Data":"302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99"}
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.035033 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p5b46" event={"ID":"2a380299-3d27-416d-9615-d3344069b769","Type":"ContainerDied","Data":"10733b352cafeeef92d274b6713254aab154f35c797efd2935a5b63294e739f4"}
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.035062 5003 scope.go:117] "RemoveContainer" containerID="302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.035222 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p5b46"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.064739 5003 scope.go:117] "RemoveContainer" containerID="5899f155b3653b554108ccd0d75dfcf1557746002020f59a94e20d5ad45d31ad"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.070342 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a380299-3d27-416d-9615-d3344069b769" (UID: "2a380299-3d27-416d-9615-d3344069b769"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.080503 5003 scope.go:117] "RemoveContainer" containerID="f6f3cc33e3a7681fa8641f91e044ae2e6d8a93f4a35b78e99373c00c07aea658"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.101479 5003 scope.go:117] "RemoveContainer" containerID="302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99"
Jan 04 12:03:33 crc kubenswrapper[5003]: E0104 12:03:33.102004 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99\": container with ID starting with 302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99 not found: ID does not exist" containerID="302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.102060 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99"} err="failed to get container status \"302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99\": rpc error: code = NotFound desc = could not find container \"302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99\": container with ID starting with 302eb350091b0c63b7d3f861ba68938b4950d02faed3c46f10e45b005fd2ac99 not found: ID does not exist"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.102087 5003 scope.go:117] "RemoveContainer" containerID="5899f155b3653b554108ccd0d75dfcf1557746002020f59a94e20d5ad45d31ad"
Jan 04 12:03:33 crc kubenswrapper[5003]: E0104 12:03:33.102443 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5899f155b3653b554108ccd0d75dfcf1557746002020f59a94e20d5ad45d31ad\": container with ID starting with 5899f155b3653b554108ccd0d75dfcf1557746002020f59a94e20d5ad45d31ad not found: ID does not exist" containerID="5899f155b3653b554108ccd0d75dfcf1557746002020f59a94e20d5ad45d31ad"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.102504 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5899f155b3653b554108ccd0d75dfcf1557746002020f59a94e20d5ad45d31ad"} err="failed to get container status \"5899f155b3653b554108ccd0d75dfcf1557746002020f59a94e20d5ad45d31ad\": rpc error: code = NotFound desc = could not find container \"5899f155b3653b554108ccd0d75dfcf1557746002020f59a94e20d5ad45d31ad\": container with ID starting with 5899f155b3653b554108ccd0d75dfcf1557746002020f59a94e20d5ad45d31ad not found: ID does not exist"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.102541 5003 scope.go:117] "RemoveContainer" containerID="f6f3cc33e3a7681fa8641f91e044ae2e6d8a93f4a35b78e99373c00c07aea658"
Jan 04 12:03:33 crc kubenswrapper[5003]: E0104 12:03:33.103193 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6f3cc33e3a7681fa8641f91e044ae2e6d8a93f4a35b78e99373c00c07aea658\": container with ID starting with f6f3cc33e3a7681fa8641f91e044ae2e6d8a93f4a35b78e99373c00c07aea658 not found: ID does not exist" containerID="f6f3cc33e3a7681fa8641f91e044ae2e6d8a93f4a35b78e99373c00c07aea658"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.103242 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6f3cc33e3a7681fa8641f91e044ae2e6d8a93f4a35b78e99373c00c07aea658"} err="failed to get container status \"f6f3cc33e3a7681fa8641f91e044ae2e6d8a93f4a35b78e99373c00c07aea658\": rpc error: code = NotFound desc = could not find container \"f6f3cc33e3a7681fa8641f91e044ae2e6d8a93f4a35b78e99373c00c07aea658\": container with ID starting with f6f3cc33e3a7681fa8641f91e044ae2e6d8a93f4a35b78e99373c00c07aea658 not found: ID does not exist"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.121621 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w62lq\" (UniqueName: \"kubernetes.io/projected/2a380299-3d27-416d-9615-d3344069b769-kube-api-access-w62lq\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.121662 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.121679 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a380299-3d27-416d-9615-d3344069b769-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.363754 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-p5b46"]
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.373835 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-p5b46"]
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.961233 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-hvhx7"]
Jan 04 12:03:33 crc kubenswrapper[5003]: E0104 12:03:33.961461 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a380299-3d27-416d-9615-d3344069b769" containerName="extract-utilities"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.961473 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a380299-3d27-416d-9615-d3344069b769" containerName="extract-utilities"
Jan 04 12:03:33 crc kubenswrapper[5003]: E0104 12:03:33.961488 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a380299-3d27-416d-9615-d3344069b769" containerName="extract-content"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.961495 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a380299-3d27-416d-9615-d3344069b769" containerName="extract-content"
Jan 04 12:03:33 crc kubenswrapper[5003]: E0104 12:03:33.961504 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a380299-3d27-416d-9615-d3344069b769" containerName="registry-server"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.961511 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a380299-3d27-416d-9615-d3344069b769" containerName="registry-server"
Jan 04 12:03:33 crc kubenswrapper[5003]: E0104 12:03:33.961521 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f244a26-2db3-4873-a3c4-11677f61d56b" containerName="extract-utilities"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.961528 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f244a26-2db3-4873-a3c4-11677f61d56b" containerName="extract-utilities"
Jan 04 12:03:33 crc kubenswrapper[5003]: E0104 12:03:33.961539 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f244a26-2db3-4873-a3c4-11677f61d56b" containerName="registry-server"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.961545 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f244a26-2db3-4873-a3c4-11677f61d56b" containerName="registry-server"
Jan 04 12:03:33 crc kubenswrapper[5003]: E0104 12:03:33.961556 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f244a26-2db3-4873-a3c4-11677f61d56b" containerName="extract-content"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.961562 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f244a26-2db3-4873-a3c4-11677f61d56b" containerName="extract-content"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.961688 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f244a26-2db3-4873-a3c4-11677f61d56b" containerName="registry-server"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.961701 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a380299-3d27-416d-9615-d3344069b769" containerName="registry-server"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.962154 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.964049 5003 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-dmpt9"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.968963 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.969385 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Jan 04 12:03:33 crc kubenswrapper[5003]: I0104 12:03:33.980690 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-hvhx7"]
Jan 04 12:03:34 crc kubenswrapper[5003]: I0104 12:03:34.034357 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/27b975c9-a239-4818-b46c-f22360f31341-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-hvhx7\" (UID: \"27b975c9-a239-4818-b46c-f22360f31341\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7"
Jan 04 12:03:34 crc kubenswrapper[5003]: I0104 12:03:34.034429 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppf4x\" (UniqueName: \"kubernetes.io/projected/27b975c9-a239-4818-b46c-f22360f31341-kube-api-access-ppf4x\") pod \"cert-manager-webhook-f4fb5df64-hvhx7\" (UID: \"27b975c9-a239-4818-b46c-f22360f31341\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7"
Jan 04 12:03:34 crc kubenswrapper[5003]: I0104 12:03:34.136362 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/27b975c9-a239-4818-b46c-f22360f31341-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-hvhx7\" (UID: \"27b975c9-a239-4818-b46c-f22360f31341\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7"
Jan 04 12:03:34 crc kubenswrapper[5003]: I0104 12:03:34.136685 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppf4x\" (UniqueName: \"kubernetes.io/projected/27b975c9-a239-4818-b46c-f22360f31341-kube-api-access-ppf4x\") pod \"cert-manager-webhook-f4fb5df64-hvhx7\" (UID: \"27b975c9-a239-4818-b46c-f22360f31341\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7"
Jan 04 12:03:34 crc kubenswrapper[5003]: I0104 12:03:34.154288 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppf4x\" (UniqueName: \"kubernetes.io/projected/27b975c9-a239-4818-b46c-f22360f31341-kube-api-access-ppf4x\") pod \"cert-manager-webhook-f4fb5df64-hvhx7\" (UID: \"27b975c9-a239-4818-b46c-f22360f31341\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7"
Jan 04 12:03:34 crc kubenswrapper[5003]: I0104 12:03:34.174783 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/27b975c9-a239-4818-b46c-f22360f31341-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-hvhx7\" (UID: \"27b975c9-a239-4818-b46c-f22360f31341\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7"
Jan 04 12:03:34 crc kubenswrapper[5003]: I0104 12:03:34.283994 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7"
Jan 04 12:03:34 crc kubenswrapper[5003]: I0104 12:03:34.625443 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-hvhx7"]
Jan 04 12:03:34 crc kubenswrapper[5003]: I0104 12:03:34.815916 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a380299-3d27-416d-9615-d3344069b769" path="/var/lib/kubelet/pods/2a380299-3d27-416d-9615-d3344069b769/volumes"
Jan 04 12:03:35 crc kubenswrapper[5003]: I0104 12:03:35.062377 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7" event={"ID":"27b975c9-a239-4818-b46c-f22360f31341","Type":"ContainerStarted","Data":"719bd94a356615a826fd772453138eba56bdd41b0f5576564aa72ade0d80b9c8"}
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.449664 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pqsnh"]
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.452749 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pqsnh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.459262 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pqsnh"]
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.602245 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-catalog-content\") pod \"certified-operators-pqsnh\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " pod="openshift-marketplace/certified-operators-pqsnh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.602770 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzq4v\" (UniqueName: \"kubernetes.io/projected/eff85255-cd95-48af-b44d-5c92d35a095a-kube-api-access-pzq4v\") pod \"certified-operators-pqsnh\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " pod="openshift-marketplace/certified-operators-pqsnh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.602855 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-utilities\") pod \"certified-operators-pqsnh\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " pod="openshift-marketplace/certified-operators-pqsnh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.704380 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-catalog-content\") pod \"certified-operators-pqsnh\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " pod="openshift-marketplace/certified-operators-pqsnh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.704453 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzq4v\" (UniqueName: \"kubernetes.io/projected/eff85255-cd95-48af-b44d-5c92d35a095a-kube-api-access-pzq4v\") pod \"certified-operators-pqsnh\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " pod="openshift-marketplace/certified-operators-pqsnh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.704501 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-utilities\") pod \"certified-operators-pqsnh\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " pod="openshift-marketplace/certified-operators-pqsnh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.705139 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-utilities\") pod \"certified-operators-pqsnh\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " pod="openshift-marketplace/certified-operators-pqsnh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.705471 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-catalog-content\") pod \"certified-operators-pqsnh\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " pod="openshift-marketplace/certified-operators-pqsnh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.735619 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzq4v\" (UniqueName: \"kubernetes.io/projected/eff85255-cd95-48af-b44d-5c92d35a095a-kube-api-access-pzq4v\") pod \"certified-operators-pqsnh\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " pod="openshift-marketplace/certified-operators-pqsnh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.756513 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-9njcn"]
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.757454 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-9njcn"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.760434 5003 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-f7vgh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.787978 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-9njcn"]
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.817169 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pqsnh"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.911187 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4trt\" (UniqueName: \"kubernetes.io/projected/e3aa0270-77e2-4f4b-bbf7-8957259dc9d0-kube-api-access-r4trt\") pod \"cert-manager-cainjector-855d9ccff4-9njcn\" (UID: \"e3aa0270-77e2-4f4b-bbf7-8957259dc9d0\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-9njcn"
Jan 04 12:03:38 crc kubenswrapper[5003]: I0104 12:03:38.911545 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e3aa0270-77e2-4f4b-bbf7-8957259dc9d0-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-9njcn\" (UID: \"e3aa0270-77e2-4f4b-bbf7-8957259dc9d0\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-9njcn"
Jan 04 12:03:39 crc kubenswrapper[5003]: I0104 12:03:39.013457 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4trt\" (UniqueName: \"kubernetes.io/projected/e3aa0270-77e2-4f4b-bbf7-8957259dc9d0-kube-api-access-r4trt\") pod \"cert-manager-cainjector-855d9ccff4-9njcn\" (UID: \"e3aa0270-77e2-4f4b-bbf7-8957259dc9d0\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-9njcn"
Jan 04 12:03:39 crc kubenswrapper[5003]: I0104 12:03:39.013796 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e3aa0270-77e2-4f4b-bbf7-8957259dc9d0-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-9njcn\" (UID: \"e3aa0270-77e2-4f4b-bbf7-8957259dc9d0\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-9njcn"
Jan 04 12:03:39 crc kubenswrapper[5003]: I0104 12:03:39.037364 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4trt\" (UniqueName: \"kubernetes.io/projected/e3aa0270-77e2-4f4b-bbf7-8957259dc9d0-kube-api-access-r4trt\") pod \"cert-manager-cainjector-855d9ccff4-9njcn\" (UID: \"e3aa0270-77e2-4f4b-bbf7-8957259dc9d0\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-9njcn"
Jan 04 12:03:39 crc kubenswrapper[5003]: I0104 12:03:39.038445 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\"
(UniqueName: \"kubernetes.io/projected/e3aa0270-77e2-4f4b-bbf7-8957259dc9d0-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-9njcn\" (UID: \"e3aa0270-77e2-4f4b-bbf7-8957259dc9d0\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-9njcn" Jan 04 12:03:39 crc kubenswrapper[5003]: I0104 12:03:39.100025 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-9njcn" Jan 04 12:03:39 crc kubenswrapper[5003]: I0104 12:03:39.418827 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:03:39 crc kubenswrapper[5003]: I0104 12:03:39.418903 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:03:44 crc kubenswrapper[5003]: W0104 12:03:44.081712 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice/crio-d6a9f7551689795ee8bf4361bf481c8b06ccd2aa0bb8e6b57f509dd64017e9fb WatchSource:0}: Error finding container d6a9f7551689795ee8bf4361bf481c8b06ccd2aa0bb8e6b57f509dd64017e9fb: Status 404 returned error can't find the container with id d6a9f7551689795ee8bf4361bf481c8b06ccd2aa0bb8e6b57f509dd64017e9fb Jan 04 12:03:44 crc kubenswrapper[5003]: I0104 12:03:44.084389 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pqsnh"] Jan 04 12:03:44 crc kubenswrapper[5003]: I0104 12:03:44.143711 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqsnh" event={"ID":"eff85255-cd95-48af-b44d-5c92d35a095a","Type":"ContainerStarted","Data":"d6a9f7551689795ee8bf4361bf481c8b06ccd2aa0bb8e6b57f509dd64017e9fb"} Jan 04 12:03:44 crc kubenswrapper[5003]: I0104 12:03:44.145063 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7" event={"ID":"27b975c9-a239-4818-b46c-f22360f31341","Type":"ContainerStarted","Data":"1dd0e2f06a50e6bb276c0cd8749268b8ae509980c2d6098c9bba19db3a3228c0"} Jan 04 12:03:44 crc kubenswrapper[5003]: I0104 12:03:44.145219 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7" Jan 04 12:03:44 crc kubenswrapper[5003]: I0104 12:03:44.171162 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7" podStartSLOduration=2.092438118 podStartE2EDuration="11.171135475s" podCreationTimestamp="2026-01-04 12:03:33 +0000 UTC" firstStartedPulling="2026-01-04 12:03:34.646942794 +0000 UTC m=+930.119972635" lastFinishedPulling="2026-01-04 12:03:43.725640151 +0000 UTC m=+939.198669992" observedRunningTime="2026-01-04 12:03:44.164446307 +0000 UTC m=+939.637476148" watchObservedRunningTime="2026-01-04 12:03:44.171135475 +0000 UTC m=+939.644165336" Jan 04 12:03:44 crc kubenswrapper[5003]: I0104 12:03:44.171602 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["cert-manager/cert-manager-cainjector-855d9ccff4-9njcn"] Jan 04 12:03:45 crc kubenswrapper[5003]: I0104 12:03:45.152159 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-9njcn" event={"ID":"e3aa0270-77e2-4f4b-bbf7-8957259dc9d0","Type":"ContainerStarted","Data":"f36be11968890532bcdaa2f89345b1ad596de1b20ac6f129db42f9e8ef2a9886"} Jan 04 12:03:45 crc kubenswrapper[5003]: I0104 12:03:45.152433 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-9njcn" event={"ID":"e3aa0270-77e2-4f4b-bbf7-8957259dc9d0","Type":"ContainerStarted","Data":"83b16f4aaee7805524245f9e6b7fb11e46db226c6a60b82b269c874b1e33a901"} Jan 04 12:03:45 crc kubenswrapper[5003]: I0104 12:03:45.153628 5003 generic.go:334] "Generic (PLEG): container finished" podID="eff85255-cd95-48af-b44d-5c92d35a095a" containerID="83d0fe71f787cfbde7b02bf8d751960c54997e6d734db1902fd39ac9a48e05e0" exitCode=0 Jan 04 12:03:45 crc kubenswrapper[5003]: I0104 12:03:45.153670 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqsnh" event={"ID":"eff85255-cd95-48af-b44d-5c92d35a095a","Type":"ContainerDied","Data":"83d0fe71f787cfbde7b02bf8d751960c54997e6d734db1902fd39ac9a48e05e0"} Jan 04 12:03:45 crc kubenswrapper[5003]: I0104 12:03:45.168524 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-9njcn" podStartSLOduration=7.168506639 podStartE2EDuration="7.168506639s" podCreationTimestamp="2026-01-04 12:03:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:03:45.166920007 +0000 UTC m=+940.639949878" watchObservedRunningTime="2026-01-04 12:03:45.168506639 +0000 UTC m=+940.641536500" Jan 04 12:03:46 crc kubenswrapper[5003]: I0104 12:03:46.161248 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqsnh" event={"ID":"eff85255-cd95-48af-b44d-5c92d35a095a","Type":"ContainerStarted","Data":"dce48eec3dcc3858d5227753f3717bd837e1c0e51c539ea656c2aec3268f9f44"} Jan 04 12:03:47 crc kubenswrapper[5003]: I0104 12:03:47.168979 5003 generic.go:334] "Generic (PLEG): container finished" podID="eff85255-cd95-48af-b44d-5c92d35a095a" containerID="dce48eec3dcc3858d5227753f3717bd837e1c0e51c539ea656c2aec3268f9f44" exitCode=0 Jan 04 12:03:47 crc kubenswrapper[5003]: I0104 12:03:47.169059 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqsnh" event={"ID":"eff85255-cd95-48af-b44d-5c92d35a095a","Type":"ContainerDied","Data":"dce48eec3dcc3858d5227753f3717bd837e1c0e51c539ea656c2aec3268f9f44"} Jan 04 12:03:49 crc kubenswrapper[5003]: I0104 12:03:49.189643 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqsnh" event={"ID":"eff85255-cd95-48af-b44d-5c92d35a095a","Type":"ContainerStarted","Data":"87190f1e73320079227336235e690007e62c92068fee5fcb76e2370082bb0876"} Jan 04 12:03:49 crc kubenswrapper[5003]: I0104 12:03:49.209321 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pqsnh" podStartSLOduration=8.160259364 podStartE2EDuration="11.209296819s" podCreationTimestamp="2026-01-04 12:03:38 +0000 UTC" firstStartedPulling="2026-01-04 12:03:45.155482723 +0000 UTC m=+940.628512564" lastFinishedPulling="2026-01-04 12:03:48.204520188 +0000 
UTC m=+943.677550019" observedRunningTime="2026-01-04 12:03:49.206404162 +0000 UTC m=+944.679434043" watchObservedRunningTime="2026-01-04 12:03:49.209296819 +0000 UTC m=+944.682326660" Jan 04 12:03:49 crc kubenswrapper[5003]: I0104 12:03:49.291904 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-hvhx7" Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 12:03:52.129795 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-6h4sb"] Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 12:03:52.131644 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-6h4sb" Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 12:03:52.137814 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-6h4sb"] Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 12:03:52.147119 5003 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-fsnpj" Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 12:03:52.148862 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zc9l\" (UniqueName: \"kubernetes.io/projected/c58a6400-ce47-4bb9-873d-12da105e4794-kube-api-access-9zc9l\") pod \"cert-manager-86cb77c54b-6h4sb\" (UID: \"c58a6400-ce47-4bb9-873d-12da105e4794\") " pod="cert-manager/cert-manager-86cb77c54b-6h4sb" Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 12:03:52.148949 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c58a6400-ce47-4bb9-873d-12da105e4794-bound-sa-token\") pod \"cert-manager-86cb77c54b-6h4sb\" (UID: \"c58a6400-ce47-4bb9-873d-12da105e4794\") " pod="cert-manager/cert-manager-86cb77c54b-6h4sb" Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 12:03:52.250373 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c58a6400-ce47-4bb9-873d-12da105e4794-bound-sa-token\") pod \"cert-manager-86cb77c54b-6h4sb\" (UID: \"c58a6400-ce47-4bb9-873d-12da105e4794\") " pod="cert-manager/cert-manager-86cb77c54b-6h4sb" Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 12:03:52.250495 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zc9l\" (UniqueName: \"kubernetes.io/projected/c58a6400-ce47-4bb9-873d-12da105e4794-kube-api-access-9zc9l\") pod \"cert-manager-86cb77c54b-6h4sb\" (UID: \"c58a6400-ce47-4bb9-873d-12da105e4794\") " pod="cert-manager/cert-manager-86cb77c54b-6h4sb" Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 12:03:52.270188 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c58a6400-ce47-4bb9-873d-12da105e4794-bound-sa-token\") pod \"cert-manager-86cb77c54b-6h4sb\" (UID: \"c58a6400-ce47-4bb9-873d-12da105e4794\") " pod="cert-manager/cert-manager-86cb77c54b-6h4sb" Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 12:03:52.276211 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zc9l\" (UniqueName: \"kubernetes.io/projected/c58a6400-ce47-4bb9-873d-12da105e4794-kube-api-access-9zc9l\") pod \"cert-manager-86cb77c54b-6h4sb\" (UID: \"c58a6400-ce47-4bb9-873d-12da105e4794\") " pod="cert-manager/cert-manager-86cb77c54b-6h4sb" Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 
12:03:52.451482 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-6h4sb" Jan 04 12:03:52 crc kubenswrapper[5003]: I0104 12:03:52.716613 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-6h4sb"] Jan 04 12:03:53 crc kubenswrapper[5003]: I0104 12:03:53.216730 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-6h4sb" event={"ID":"c58a6400-ce47-4bb9-873d-12da105e4794","Type":"ContainerStarted","Data":"bd699a8e87cf27f04997581638d6bb6f10833d60750e4ef9d5a6507dc4f6824b"} Jan 04 12:03:53 crc kubenswrapper[5003]: I0104 12:03:53.218468 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-6h4sb" event={"ID":"c58a6400-ce47-4bb9-873d-12da105e4794","Type":"ContainerStarted","Data":"54e5ba79c24b8d0a87e877242a2cb0abc89cc37c8334c2607c1a254909585651"} Jan 04 12:03:53 crc kubenswrapper[5003]: I0104 12:03:53.233311 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-6h4sb" podStartSLOduration=1.233287993 podStartE2EDuration="1.233287993s" podCreationTimestamp="2026-01-04 12:03:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:03:53.229446171 +0000 UTC m=+948.702476012" watchObservedRunningTime="2026-01-04 12:03:53.233287993 +0000 UTC m=+948.706317834" Jan 04 12:03:58 crc kubenswrapper[5003]: I0104 12:03:58.819292 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pqsnh" Jan 04 12:03:58 crc kubenswrapper[5003]: I0104 12:03:58.819933 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pqsnh" Jan 04 12:03:58 crc kubenswrapper[5003]: I0104 12:03:58.861449 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pqsnh" Jan 04 12:03:59 crc kubenswrapper[5003]: I0104 12:03:59.294449 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pqsnh" Jan 04 12:03:59 crc kubenswrapper[5003]: I0104 12:03:59.333204 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pqsnh"] Jan 04 12:04:01 crc kubenswrapper[5003]: I0104 12:04:01.275678 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pqsnh" podUID="eff85255-cd95-48af-b44d-5c92d35a095a" containerName="registry-server" containerID="cri-o://87190f1e73320079227336235e690007e62c92068fee5fcb76e2370082bb0876" gracePeriod=2 Jan 04 12:04:03 crc kubenswrapper[5003]: I0104 12:04:03.497244 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-gdng2"] Jan 04 12:04:03 crc kubenswrapper[5003]: I0104 12:04:03.507300 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-gdng2"] Jan 04 12:04:03 crc kubenswrapper[5003]: I0104 12:04:03.507484 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-gdng2" Jan 04 12:04:03 crc kubenswrapper[5003]: I0104 12:04:03.509586 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 04 12:04:03 crc kubenswrapper[5003]: I0104 12:04:03.510058 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-mr6fn" Jan 04 12:04:03 crc kubenswrapper[5003]: I0104 12:04:03.510097 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 04 12:04:03 crc kubenswrapper[5003]: I0104 12:04:03.608464 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsnzv\" (UniqueName: \"kubernetes.io/projected/e9eb7ccc-fe7a-4535-9757-76564d7b7f25-kube-api-access-qsnzv\") pod \"openstack-operator-index-gdng2\" (UID: \"e9eb7ccc-fe7a-4535-9757-76564d7b7f25\") " pod="openstack-operators/openstack-operator-index-gdng2" Jan 04 12:04:03 crc kubenswrapper[5003]: I0104 12:04:03.710153 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsnzv\" (UniqueName: \"kubernetes.io/projected/e9eb7ccc-fe7a-4535-9757-76564d7b7f25-kube-api-access-qsnzv\") pod \"openstack-operator-index-gdng2\" (UID: \"e9eb7ccc-fe7a-4535-9757-76564d7b7f25\") " pod="openstack-operators/openstack-operator-index-gdng2" Jan 04 12:04:03 crc kubenswrapper[5003]: I0104 12:04:03.736827 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsnzv\" (UniqueName: \"kubernetes.io/projected/e9eb7ccc-fe7a-4535-9757-76564d7b7f25-kube-api-access-qsnzv\") pod \"openstack-operator-index-gdng2\" (UID: \"e9eb7ccc-fe7a-4535-9757-76564d7b7f25\") " pod="openstack-operators/openstack-operator-index-gdng2" Jan 04 12:04:03 crc kubenswrapper[5003]: I0104 12:04:03.823535 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-gdng2" Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.051835 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-gdng2"] Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.297081 5003 generic.go:334] "Generic (PLEG): container finished" podID="eff85255-cd95-48af-b44d-5c92d35a095a" containerID="87190f1e73320079227336235e690007e62c92068fee5fcb76e2370082bb0876" exitCode=0 Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.297164 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqsnh" event={"ID":"eff85255-cd95-48af-b44d-5c92d35a095a","Type":"ContainerDied","Data":"87190f1e73320079227336235e690007e62c92068fee5fcb76e2370082bb0876"} Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.298771 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-gdng2" event={"ID":"e9eb7ccc-fe7a-4535-9757-76564d7b7f25","Type":"ContainerStarted","Data":"b61d38a009baf3064a205ed261bdbcfc44e2ef57b1e80a10df02442e3c4b8906"} Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.340281 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pqsnh" Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.523919 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-utilities\") pod \"eff85255-cd95-48af-b44d-5c92d35a095a\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.524007 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-catalog-content\") pod \"eff85255-cd95-48af-b44d-5c92d35a095a\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.524080 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzq4v\" (UniqueName: \"kubernetes.io/projected/eff85255-cd95-48af-b44d-5c92d35a095a-kube-api-access-pzq4v\") pod \"eff85255-cd95-48af-b44d-5c92d35a095a\" (UID: \"eff85255-cd95-48af-b44d-5c92d35a095a\") " Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.524954 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-utilities" (OuterVolumeSpecName: "utilities") pod "eff85255-cd95-48af-b44d-5c92d35a095a" (UID: "eff85255-cd95-48af-b44d-5c92d35a095a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.530746 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eff85255-cd95-48af-b44d-5c92d35a095a-kube-api-access-pzq4v" (OuterVolumeSpecName: "kube-api-access-pzq4v") pod "eff85255-cd95-48af-b44d-5c92d35a095a" (UID: "eff85255-cd95-48af-b44d-5c92d35a095a"). InnerVolumeSpecName "kube-api-access-pzq4v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.577177 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eff85255-cd95-48af-b44d-5c92d35a095a" (UID: "eff85255-cd95-48af-b44d-5c92d35a095a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.625879 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzq4v\" (UniqueName: \"kubernetes.io/projected/eff85255-cd95-48af-b44d-5c92d35a095a-kube-api-access-pzq4v\") on node \"crc\" DevicePath \"\"" Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.625913 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:04:04 crc kubenswrapper[5003]: I0104 12:04:04.625922 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eff85255-cd95-48af-b44d-5c92d35a095a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:04:05 crc kubenswrapper[5003]: I0104 12:04:05.316705 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pqsnh" event={"ID":"eff85255-cd95-48af-b44d-5c92d35a095a","Type":"ContainerDied","Data":"d6a9f7551689795ee8bf4361bf481c8b06ccd2aa0bb8e6b57f509dd64017e9fb"} Jan 04 12:04:05 crc kubenswrapper[5003]: I0104 12:04:05.316763 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pqsnh" Jan 04 12:04:05 crc kubenswrapper[5003]: I0104 12:04:05.317128 5003 scope.go:117] "RemoveContainer" containerID="87190f1e73320079227336235e690007e62c92068fee5fcb76e2370082bb0876" Jan 04 12:04:05 crc kubenswrapper[5003]: I0104 12:04:05.344268 5003 scope.go:117] "RemoveContainer" containerID="dce48eec3dcc3858d5227753f3717bd837e1c0e51c539ea656c2aec3268f9f44" Jan 04 12:04:05 crc kubenswrapper[5003]: I0104 12:04:05.358548 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pqsnh"] Jan 04 12:04:05 crc kubenswrapper[5003]: I0104 12:04:05.362978 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pqsnh"] Jan 04 12:04:05 crc kubenswrapper[5003]: I0104 12:04:05.382337 5003 scope.go:117] "RemoveContainer" containerID="83d0fe71f787cfbde7b02bf8d751960c54997e6d734db1902fd39ac9a48e05e0" Jan 04 12:04:06 crc kubenswrapper[5003]: I0104 12:04:06.326222 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-gdng2" event={"ID":"e9eb7ccc-fe7a-4535-9757-76564d7b7f25","Type":"ContainerStarted","Data":"7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936"} Jan 04 12:04:06 crc kubenswrapper[5003]: I0104 12:04:06.346696 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-gdng2" podStartSLOduration=2.171762836 podStartE2EDuration="3.346653936s" podCreationTimestamp="2026-01-04 12:04:03 +0000 UTC" firstStartedPulling="2026-01-04 12:04:04.065358096 +0000 UTC m=+959.538387937" lastFinishedPulling="2026-01-04 12:04:05.240249186 +0000 UTC m=+960.713279037" observedRunningTime="2026-01-04 12:04:06.341555581 +0000 UTC m=+961.814585422" watchObservedRunningTime="2026-01-04 12:04:06.346653936 +0000 UTC m=+961.819683787" Jan 04 12:04:06 crc kubenswrapper[5003]: I0104 12:04:06.813958 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eff85255-cd95-48af-b44d-5c92d35a095a" path="/var/lib/kubelet/pods/eff85255-cd95-48af-b44d-5c92d35a095a/volumes" Jan 04 12:04:07 crc kubenswrapper[5003]: E0104 12:04:07.821453 5003 
cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice/crio-d6a9f7551689795ee8bf4361bf481c8b06ccd2aa0bb8e6b57f509dd64017e9fb\": RecentStats: unable to find data in memory cache]" Jan 04 12:04:08 crc kubenswrapper[5003]: I0104 12:04:08.869985 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-gdng2"] Jan 04 12:04:08 crc kubenswrapper[5003]: I0104 12:04:08.870601 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-gdng2" podUID="e9eb7ccc-fe7a-4535-9757-76564d7b7f25" containerName="registry-server" containerID="cri-o://7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936" gracePeriod=2 Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.264729 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-gdng2" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.352414 5003 generic.go:334] "Generic (PLEG): container finished" podID="e9eb7ccc-fe7a-4535-9757-76564d7b7f25" containerID="7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936" exitCode=0 Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.352462 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-gdng2" event={"ID":"e9eb7ccc-fe7a-4535-9757-76564d7b7f25","Type":"ContainerDied","Data":"7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936"} Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.352493 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-gdng2" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.352551 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-gdng2" event={"ID":"e9eb7ccc-fe7a-4535-9757-76564d7b7f25","Type":"ContainerDied","Data":"b61d38a009baf3064a205ed261bdbcfc44e2ef57b1e80a10df02442e3c4b8906"} Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.352572 5003 scope.go:117] "RemoveContainer" containerID="7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.380615 5003 scope.go:117] "RemoveContainer" containerID="7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936" Jan 04 12:04:09 crc kubenswrapper[5003]: E0104 12:04:09.381319 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936\": container with ID starting with 7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936 not found: ID does not exist" containerID="7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.381394 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936"} err="failed to get container status \"7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936\": rpc error: code = NotFound desc = could not find container \"7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936\": container with ID starting with 7daad52b8b674a9f4ec5ccb6d4c9f6fbe29fae427c17d5f11b19923920f97936 not found: ID does not exist" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.402113 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsnzv\" (UniqueName: \"kubernetes.io/projected/e9eb7ccc-fe7a-4535-9757-76564d7b7f25-kube-api-access-qsnzv\") pod \"e9eb7ccc-fe7a-4535-9757-76564d7b7f25\" (UID: \"e9eb7ccc-fe7a-4535-9757-76564d7b7f25\") " Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.409843 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9eb7ccc-fe7a-4535-9757-76564d7b7f25-kube-api-access-qsnzv" (OuterVolumeSpecName: "kube-api-access-qsnzv") pod "e9eb7ccc-fe7a-4535-9757-76564d7b7f25" (UID: "e9eb7ccc-fe7a-4535-9757-76564d7b7f25"). InnerVolumeSpecName "kube-api-access-qsnzv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.418578 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.418662 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.503192 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsnzv\" (UniqueName: \"kubernetes.io/projected/e9eb7ccc-fe7a-4535-9757-76564d7b7f25-kube-api-access-qsnzv\") on node \"crc\" DevicePath \"\"" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.674944 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-fswhr"] Jan 04 12:04:09 crc kubenswrapper[5003]: E0104 12:04:09.679130 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff85255-cd95-48af-b44d-5c92d35a095a" containerName="registry-server" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.679169 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff85255-cd95-48af-b44d-5c92d35a095a" containerName="registry-server" Jan 04 12:04:09 crc kubenswrapper[5003]: E0104 12:04:09.679181 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9eb7ccc-fe7a-4535-9757-76564d7b7f25" containerName="registry-server" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.679192 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9eb7ccc-fe7a-4535-9757-76564d7b7f25" containerName="registry-server" Jan 04 12:04:09 crc kubenswrapper[5003]: E0104 12:04:09.679208 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff85255-cd95-48af-b44d-5c92d35a095a" containerName="extract-utilities" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.679217 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff85255-cd95-48af-b44d-5c92d35a095a" containerName="extract-utilities" Jan 04 12:04:09 crc kubenswrapper[5003]: E0104 12:04:09.679229 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff85255-cd95-48af-b44d-5c92d35a095a" containerName="extract-content" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.679235 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff85255-cd95-48af-b44d-5c92d35a095a" containerName="extract-content" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.679356 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="eff85255-cd95-48af-b44d-5c92d35a095a" containerName="registry-server" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.679370 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9eb7ccc-fe7a-4535-9757-76564d7b7f25" containerName="registry-server" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.680141 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-fswhr" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.683835 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-gdng2"] Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.686232 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.686814 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-mr6fn" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.687027 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.691371 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-gdng2"] Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.698811 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-fswhr"] Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.808176 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xthrp\" (UniqueName: \"kubernetes.io/projected/1ed8e853-ba61-448c-a7bb-3cfeb66a6c81-kube-api-access-xthrp\") pod \"openstack-operator-index-fswhr\" (UID: \"1ed8e853-ba61-448c-a7bb-3cfeb66a6c81\") " pod="openstack-operators/openstack-operator-index-fswhr" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.909564 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xthrp\" (UniqueName: \"kubernetes.io/projected/1ed8e853-ba61-448c-a7bb-3cfeb66a6c81-kube-api-access-xthrp\") pod \"openstack-operator-index-fswhr\" (UID: \"1ed8e853-ba61-448c-a7bb-3cfeb66a6c81\") " pod="openstack-operators/openstack-operator-index-fswhr" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.933722 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xthrp\" (UniqueName: \"kubernetes.io/projected/1ed8e853-ba61-448c-a7bb-3cfeb66a6c81-kube-api-access-xthrp\") pod \"openstack-operator-index-fswhr\" (UID: \"1ed8e853-ba61-448c-a7bb-3cfeb66a6c81\") " pod="openstack-operators/openstack-operator-index-fswhr" Jan 04 12:04:09 crc kubenswrapper[5003]: I0104 12:04:09.994297 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-fswhr" Jan 04 12:04:10 crc kubenswrapper[5003]: I0104 12:04:10.225256 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-fswhr"] Jan 04 12:04:10 crc kubenswrapper[5003]: I0104 12:04:10.361429 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fswhr" event={"ID":"1ed8e853-ba61-448c-a7bb-3cfeb66a6c81","Type":"ContainerStarted","Data":"d3c42caf21af954f7f6a680f96f823ddf15d4be3e3a9fe9039ab8dcfc8f0b268"} Jan 04 12:04:10 crc kubenswrapper[5003]: I0104 12:04:10.822152 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9eb7ccc-fe7a-4535-9757-76564d7b7f25" path="/var/lib/kubelet/pods/e9eb7ccc-fe7a-4535-9757-76564d7b7f25/volumes" Jan 04 12:04:11 crc kubenswrapper[5003]: I0104 12:04:11.372638 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fswhr" event={"ID":"1ed8e853-ba61-448c-a7bb-3cfeb66a6c81","Type":"ContainerStarted","Data":"0556da70f884f0a89a0f773b61f6008793b87b1e9c999613d2bd21fb4ea95418"} Jan 04 12:04:11 crc kubenswrapper[5003]: I0104 12:04:11.400920 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-fswhr" podStartSLOduration=1.885948328 podStartE2EDuration="2.400893087s" podCreationTimestamp="2026-01-04 12:04:09 +0000 UTC" firstStartedPulling="2026-01-04 12:04:10.240642997 +0000 UTC m=+965.713672838" lastFinishedPulling="2026-01-04 12:04:10.755587756 +0000 UTC m=+966.228617597" observedRunningTime="2026-01-04 12:04:11.396268745 +0000 UTC m=+966.869298596" watchObservedRunningTime="2026-01-04 12:04:11.400893087 +0000 UTC m=+966.873922938" Jan 04 12:04:17 crc kubenswrapper[5003]: E0104 12:04:17.978494 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice/crio-d6a9f7551689795ee8bf4361bf481c8b06ccd2aa0bb8e6b57f509dd64017e9fb\": RecentStats: unable to find data in memory cache]" Jan 04 12:04:19 crc kubenswrapper[5003]: I0104 12:04:19.994735 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-fswhr" Jan 04 12:04:19 crc kubenswrapper[5003]: I0104 12:04:19.995191 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-fswhr" Jan 04 12:04:20 crc kubenswrapper[5003]: I0104 12:04:20.032105 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-fswhr" Jan 04 12:04:20 crc kubenswrapper[5003]: I0104 12:04:20.461441 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-fswhr" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.121155 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx"] Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.124127 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.126447 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx"] Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.127305 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-pvljz" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.263123 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-bundle\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.263632 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rsgk\" (UniqueName: \"kubernetes.io/projected/4eb99728-449d-4016-86e5-47e74e5f97e9-kube-api-access-7rsgk\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.263765 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-util\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.365623 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-bundle\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.365690 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rsgk\" (UniqueName: \"kubernetes.io/projected/4eb99728-449d-4016-86e5-47e74e5f97e9-kube-api-access-7rsgk\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.365746 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-util\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.366219 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-bundle\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.366375 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-util\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.390733 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rsgk\" (UniqueName: \"kubernetes.io/projected/4eb99728-449d-4016-86e5-47e74e5f97e9-kube-api-access-7rsgk\") pod \"de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.450735 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:26 crc kubenswrapper[5003]: I0104 12:04:26.700857 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx"] Jan 04 12:04:26 crc kubenswrapper[5003]: W0104 12:04:26.710144 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4eb99728_449d_4016_86e5_47e74e5f97e9.slice/crio-0e0c89ade48e2784ef0b67886554732706a15866536706f54c72273a45ba98c3 WatchSource:0}: Error finding container 0e0c89ade48e2784ef0b67886554732706a15866536706f54c72273a45ba98c3: Status 404 returned error can't find the container with id 0e0c89ade48e2784ef0b67886554732706a15866536706f54c72273a45ba98c3 Jan 04 12:04:27 crc kubenswrapper[5003]: I0104 12:04:27.495542 5003 generic.go:334] "Generic (PLEG): container finished" podID="4eb99728-449d-4016-86e5-47e74e5f97e9" containerID="7ada91832be7c65a026d1641d0dd5ed7efae504789923ffc3a0650f06687cf75" exitCode=0 Jan 04 12:04:27 crc kubenswrapper[5003]: I0104 12:04:27.495726 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" event={"ID":"4eb99728-449d-4016-86e5-47e74e5f97e9","Type":"ContainerDied","Data":"7ada91832be7c65a026d1641d0dd5ed7efae504789923ffc3a0650f06687cf75"} Jan 04 12:04:27 crc kubenswrapper[5003]: I0104 12:04:27.495912 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" event={"ID":"4eb99728-449d-4016-86e5-47e74e5f97e9","Type":"ContainerStarted","Data":"0e0c89ade48e2784ef0b67886554732706a15866536706f54c72273a45ba98c3"} Jan 04 12:04:28 crc kubenswrapper[5003]: E0104 12:04:28.132309 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice/crio-d6a9f7551689795ee8bf4361bf481c8b06ccd2aa0bb8e6b57f509dd64017e9fb\": RecentStats: unable to find data in memory 
cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice\": RecentStats: unable to find data in memory cache]" Jan 04 12:04:28 crc kubenswrapper[5003]: I0104 12:04:28.508568 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" event={"ID":"4eb99728-449d-4016-86e5-47e74e5f97e9","Type":"ContainerStarted","Data":"f711d895b9500921040d13cb65d8fe530a3d5b79656e3da86578af4607cafbf0"} Jan 04 12:04:29 crc kubenswrapper[5003]: I0104 12:04:29.522813 5003 generic.go:334] "Generic (PLEG): container finished" podID="4eb99728-449d-4016-86e5-47e74e5f97e9" containerID="f711d895b9500921040d13cb65d8fe530a3d5b79656e3da86578af4607cafbf0" exitCode=0 Jan 04 12:04:29 crc kubenswrapper[5003]: I0104 12:04:29.522940 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" event={"ID":"4eb99728-449d-4016-86e5-47e74e5f97e9","Type":"ContainerDied","Data":"f711d895b9500921040d13cb65d8fe530a3d5b79656e3da86578af4607cafbf0"} Jan 04 12:04:30 crc kubenswrapper[5003]: I0104 12:04:30.534914 5003 generic.go:334] "Generic (PLEG): container finished" podID="4eb99728-449d-4016-86e5-47e74e5f97e9" containerID="86ffdaf563cc7965a50d748948727f52350a27435c9e98874258f0178a4ab602" exitCode=0 Jan 04 12:04:30 crc kubenswrapper[5003]: I0104 12:04:30.534987 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" event={"ID":"4eb99728-449d-4016-86e5-47e74e5f97e9","Type":"ContainerDied","Data":"86ffdaf563cc7965a50d748948727f52350a27435c9e98874258f0178a4ab602"} Jan 04 12:04:31 crc kubenswrapper[5003]: I0104 12:04:31.973159 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.085222 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rsgk\" (UniqueName: \"kubernetes.io/projected/4eb99728-449d-4016-86e5-47e74e5f97e9-kube-api-access-7rsgk\") pod \"4eb99728-449d-4016-86e5-47e74e5f97e9\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.085326 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-util\") pod \"4eb99728-449d-4016-86e5-47e74e5f97e9\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.085396 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-bundle\") pod \"4eb99728-449d-4016-86e5-47e74e5f97e9\" (UID: \"4eb99728-449d-4016-86e5-47e74e5f97e9\") " Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.086400 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-bundle" (OuterVolumeSpecName: "bundle") pod "4eb99728-449d-4016-86e5-47e74e5f97e9" (UID: "4eb99728-449d-4016-86e5-47e74e5f97e9"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.095323 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eb99728-449d-4016-86e5-47e74e5f97e9-kube-api-access-7rsgk" (OuterVolumeSpecName: "kube-api-access-7rsgk") pod "4eb99728-449d-4016-86e5-47e74e5f97e9" (UID: "4eb99728-449d-4016-86e5-47e74e5f97e9"). InnerVolumeSpecName "kube-api-access-7rsgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.187691 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rsgk\" (UniqueName: \"kubernetes.io/projected/4eb99728-449d-4016-86e5-47e74e5f97e9-kube-api-access-7rsgk\") on node \"crc\" DevicePath \"\"" Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.187753 5003 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.336086 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-util" (OuterVolumeSpecName: "util") pod "4eb99728-449d-4016-86e5-47e74e5f97e9" (UID: "4eb99728-449d-4016-86e5-47e74e5f97e9"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.390458 5003 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4eb99728-449d-4016-86e5-47e74e5f97e9-util\") on node \"crc\" DevicePath \"\"" Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.562475 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" event={"ID":"4eb99728-449d-4016-86e5-47e74e5f97e9","Type":"ContainerDied","Data":"0e0c89ade48e2784ef0b67886554732706a15866536706f54c72273a45ba98c3"} Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.562527 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e0c89ade48e2784ef0b67886554732706a15866536706f54c72273a45ba98c3" Jan 04 12:04:32 crc kubenswrapper[5003]: I0104 12:04:32.562535 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.040899 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf"] Jan 04 12:04:38 crc kubenswrapper[5003]: E0104 12:04:38.041506 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eb99728-449d-4016-86e5-47e74e5f97e9" containerName="util" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.041519 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eb99728-449d-4016-86e5-47e74e5f97e9" containerName="util" Jan 04 12:04:38 crc kubenswrapper[5003]: E0104 12:04:38.041529 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eb99728-449d-4016-86e5-47e74e5f97e9" containerName="pull" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.041535 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eb99728-449d-4016-86e5-47e74e5f97e9" containerName="pull" Jan 04 12:04:38 crc kubenswrapper[5003]: E0104 12:04:38.041546 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eb99728-449d-4016-86e5-47e74e5f97e9" containerName="extract" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.041553 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eb99728-449d-4016-86e5-47e74e5f97e9" containerName="extract" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.041669 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eb99728-449d-4016-86e5-47e74e5f97e9" containerName="extract" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.042120 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.048462 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-pk7xz" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.072921 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf"] Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.180890 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl989\" (UniqueName: \"kubernetes.io/projected/24d858a4-77b9-4c14-a43e-535f93982288-kube-api-access-bl989\") pod \"openstack-operator-controller-operator-6879547b79-t72rf\" (UID: \"24d858a4-77b9-4c14-a43e-535f93982288\") " pod="openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.282710 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl989\" (UniqueName: \"kubernetes.io/projected/24d858a4-77b9-4c14-a43e-535f93982288-kube-api-access-bl989\") pod \"openstack-operator-controller-operator-6879547b79-t72rf\" (UID: \"24d858a4-77b9-4c14-a43e-535f93982288\") " pod="openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf" Jan 04 12:04:38 crc kubenswrapper[5003]: E0104 12:04:38.317803 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice\": RecentStats: unable to find data in memory 
cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice/crio-d6a9f7551689795ee8bf4361bf481c8b06ccd2aa0bb8e6b57f509dd64017e9fb\": RecentStats: unable to find data in memory cache]" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.321790 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl989\" (UniqueName: \"kubernetes.io/projected/24d858a4-77b9-4c14-a43e-535f93982288-kube-api-access-bl989\") pod \"openstack-operator-controller-operator-6879547b79-t72rf\" (UID: \"24d858a4-77b9-4c14-a43e-535f93982288\") " pod="openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.366279 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf" Jan 04 12:04:38 crc kubenswrapper[5003]: I0104 12:04:38.682129 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf"] Jan 04 12:04:39 crc kubenswrapper[5003]: I0104 12:04:39.419302 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:04:39 crc kubenswrapper[5003]: I0104 12:04:39.419861 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:04:39 crc kubenswrapper[5003]: I0104 12:04:39.419919 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 12:04:39 crc kubenswrapper[5003]: I0104 12:04:39.420589 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4118d04e178a916ef0fb795859c3a8da20b43a18e967d67161d5ece95b07366c"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:04:39 crc kubenswrapper[5003]: I0104 12:04:39.420647 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://4118d04e178a916ef0fb795859c3a8da20b43a18e967d67161d5ece95b07366c" gracePeriod=600 Jan 04 12:04:39 crc kubenswrapper[5003]: I0104 12:04:39.627425 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="4118d04e178a916ef0fb795859c3a8da20b43a18e967d67161d5ece95b07366c" exitCode=0 Jan 04 12:04:39 crc kubenswrapper[5003]: I0104 12:04:39.627492 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"4118d04e178a916ef0fb795859c3a8da20b43a18e967d67161d5ece95b07366c"} Jan 04 12:04:39 crc kubenswrapper[5003]: I0104 
Jan 04 12:04:39 crc kubenswrapper[5003]: I0104 12:04:39.627556 5003 scope.go:117] "RemoveContainer" containerID="544543b77ddff68504c56117c730883967d4ef6eb8006a6d7bde181f583bbabc"
Jan 04 12:04:39 crc kubenswrapper[5003]: I0104 12:04:39.628812 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf" event={"ID":"24d858a4-77b9-4c14-a43e-535f93982288","Type":"ContainerStarted","Data":"7557845fa57abbc46663b013bd5156da5aba0c7ec41d1cb2fb3e7cb38ccd48b3"}
Jan 04 12:04:40 crc kubenswrapper[5003]: I0104 12:04:40.639855 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"8805b4e8959ecfe4ed6d4f63f07630a7e965c7249b0c281ceae8ee8943118856"}
Jan 04 12:04:44 crc kubenswrapper[5003]: I0104 12:04:44.679734 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf" event={"ID":"24d858a4-77b9-4c14-a43e-535f93982288","Type":"ContainerStarted","Data":"e24fd0c257095ffe03af554e539f9d36646e4b962246eddd035eea026110ae07"}
Jan 04 12:04:44 crc kubenswrapper[5003]: I0104 12:04:44.680856 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf"
Jan 04 12:04:44 crc kubenswrapper[5003]: I0104 12:04:44.729472 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf" podStartSLOduration=1.830106899 podStartE2EDuration="6.729428966s" podCreationTimestamp="2026-01-04 12:04:38 +0000 UTC" firstStartedPulling="2026-01-04 12:04:38.695548074 +0000 UTC m=+994.168577915" lastFinishedPulling="2026-01-04 12:04:43.594870141 +0000 UTC m=+999.067899982" observedRunningTime="2026-01-04 12:04:44.723390147 +0000 UTC m=+1000.196420058" watchObservedRunningTime="2026-01-04 12:04:44.729428966 +0000 UTC m=+1000.202458857"
Jan 04 12:04:48 crc kubenswrapper[5003]: I0104 12:04:48.370868 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6879547b79-t72rf"
Jan 04 12:04:48 crc kubenswrapper[5003]: E0104 12:04:48.506551 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice/crio-d6a9f7551689795ee8bf4361bf481c8b06ccd2aa0bb8e6b57f509dd64017e9fb\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice\": RecentStats: unable to find data in memory cache]"
Jan 04 12:04:58 crc kubenswrapper[5003]: E0104 12:04:58.673428 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff85255_cd95_48af_b44d_5c92d35a095a.slice/crio-d6a9f7551689795ee8bf4361bf481c8b06ccd2aa0bb8e6b57f509dd64017e9fb\": RecentStats: unable to find data in memory cache]"
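The pod_startup_latency_tracker entry above is internally consistent: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that end-to-end time minus the image-pull window (lastFinishedPulling minus firstStartedPulling). Reproducing the arithmetic from the logged values (monotonic m=+... offsets dropped):

    package main

    import (
        "fmt"
        "time"
    )

    func mustParse(s string) time.Time {
        t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
        if err != nil {
            panic(err)
        }
        return t
    }

    func main() {
        created := mustParse("2026-01-04 12:04:38 +0000 UTC")
        firstPull := mustParse("2026-01-04 12:04:38.695548074 +0000 UTC")
        lastPull := mustParse("2026-01-04 12:04:43.594870141 +0000 UTC")
        running := mustParse("2026-01-04 12:04:44.729428966 +0000 UTC") // watchObservedRunningTime

        e2e := running.Sub(created)     // 6.729428966s == podStartE2EDuration
        pull := lastPull.Sub(firstPull) // 4.899322067s spent pulling images
        fmt.Println(e2e, e2e-pull)      // 6.729428966s 1.830106899s == podStartSLOduration
    }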
pods=["openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.646750 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.649899 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-zpcdl" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.650080 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.650984 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.653743 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-9vdgq" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.658162 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.669818 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.670641 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.672319 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-phwd5" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.691267 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.723219 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.741218 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-7b549fc966-427tw"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.784811 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-427tw" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.790187 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-l2cx2" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.794420 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7b549fc966-427tw"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.816475 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.817658 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.819696 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-wc7f7" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.823148 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.833666 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztrg6\" (UniqueName: \"kubernetes.io/projected/916a829e-a1f9-4c4c-9253-b5d1b901f2f3-kube-api-access-ztrg6\") pod \"cinder-operator-controller-manager-78979fc445-lng2r\" (UID: \"916a829e-a1f9-4c4c-9253-b5d1b901f2f3\") " pod="openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.833723 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx76v\" (UniqueName: \"kubernetes.io/projected/c8ead0af-3ab9-4f13-b94f-83b8b48c60b7-kube-api-access-xx76v\") pod \"barbican-operator-controller-manager-f6f74d6db-89lgv\" (UID: \"c8ead0af-3ab9-4f13-b94f-83b8b48c60b7\") " pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.833744 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ztbt\" (UniqueName: \"kubernetes.io/projected/8c9bc809-322b-4079-a1ea-533ce9239181-kube-api-access-9ztbt\") pod \"designate-operator-controller-manager-66f8b87655-5r27f\" (UID: \"8c9bc809-322b-4079-a1ea-533ce9239181\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.841102 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.842188 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.845601 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-j77h6" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.851503 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.860103 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.861482 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.863697 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-g8wbn" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.865182 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.867958 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.873523 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.874473 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.883945 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-rgsn4" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.884418 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-568985c78-94d29"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.885308 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.890976 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.892215 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-vwhjk" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.905444 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-568985c78-94d29"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.918800 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-598945d5b8-869w7"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.920175 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.927536 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-598945d5b8-869w7"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.936108 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6fsp\" (UniqueName: \"kubernetes.io/projected/549e2841-20b6-4018-85f2-0bc091560658-kube-api-access-d6fsp\") pod \"heat-operator-controller-manager-658dd65b86-fd9hs\" (UID: \"549e2841-20b6-4018-85f2-0bc091560658\") " pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.936211 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztrg6\" (UniqueName: \"kubernetes.io/projected/916a829e-a1f9-4c4c-9253-b5d1b901f2f3-kube-api-access-ztrg6\") pod \"cinder-operator-controller-manager-78979fc445-lng2r\" (UID: \"916a829e-a1f9-4c4c-9253-b5d1b901f2f3\") " pod="openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.936264 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx76v\" (UniqueName: \"kubernetes.io/projected/c8ead0af-3ab9-4f13-b94f-83b8b48c60b7-kube-api-access-xx76v\") pod \"barbican-operator-controller-manager-f6f74d6db-89lgv\" (UID: \"c8ead0af-3ab9-4f13-b94f-83b8b48c60b7\") " pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.936295 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ztbt\" (UniqueName: \"kubernetes.io/projected/8c9bc809-322b-4079-a1ea-533ce9239181-kube-api-access-9ztbt\") pod \"designate-operator-controller-manager-66f8b87655-5r27f\" (UID: \"8c9bc809-322b-4079-a1ea-533ce9239181\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.936327 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4qrf\" (UniqueName: \"kubernetes.io/projected/957cb0f0-d8f1-43a9-8d05-0b7db926c066-kube-api-access-k4qrf\") pod \"horizon-operator-controller-manager-7f5ddd8d7b-nbbqt\" (UID: \"957cb0f0-d8f1-43a9-8d05-0b7db926c066\") " pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.936356 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jgt4\" (UniqueName: \"kubernetes.io/projected/218ad7b5-e7ca-4f1a-b863-1f160424b195-kube-api-access-4jgt4\") pod \"glance-operator-controller-manager-7b549fc966-427tw\" (UID: \"218ad7b5-e7ca-4f1a-b863-1f160424b195\") " pod="openstack-operators/glance-operator-controller-manager-7b549fc966-427tw" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.941328 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-hn7t6" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.964137 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv"] Jan 04 12:05:25 crc 
kubenswrapper[5003]: I0104 12:05:25.965203 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.973128 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.973825 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.978161 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.988560 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8"] Jan 04 12:05:25 crc kubenswrapper[5003]: I0104 12:05:25.989501 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.000123 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.006056 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztrg6\" (UniqueName: \"kubernetes.io/projected/916a829e-a1f9-4c4c-9253-b5d1b901f2f3-kube-api-access-ztrg6\") pod \"cinder-operator-controller-manager-78979fc445-lng2r\" (UID: \"916a829e-a1f9-4c4c-9253-b5d1b901f2f3\") " pod="openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.010276 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-85h8d" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.026148 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-5wx48" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.034199 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-ffrrh" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.034223 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.036642 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ztbt\" (UniqueName: \"kubernetes.io/projected/8c9bc809-322b-4079-a1ea-533ce9239181-kube-api-access-9ztbt\") pod \"designate-operator-controller-manager-66f8b87655-5r27f\" (UID: \"8c9bc809-322b-4079-a1ea-533ce9239181\") " pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.038381 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64lnc\" (UniqueName: \"kubernetes.io/projected/157189ac-3baf-49a7-b37b-dacddf4f43af-kube-api-access-64lnc\") pod \"keystone-operator-controller-manager-568985c78-94d29\" (UID: \"157189ac-3baf-49a7-b37b-dacddf4f43af\") " 
pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.038421 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4qrf\" (UniqueName: \"kubernetes.io/projected/957cb0f0-d8f1-43a9-8d05-0b7db926c066-kube-api-access-k4qrf\") pod \"horizon-operator-controller-manager-7f5ddd8d7b-nbbqt\" (UID: \"957cb0f0-d8f1-43a9-8d05-0b7db926c066\") " pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.038441 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jgt4\" (UniqueName: \"kubernetes.io/projected/218ad7b5-e7ca-4f1a-b863-1f160424b195-kube-api-access-4jgt4\") pod \"glance-operator-controller-manager-7b549fc966-427tw\" (UID: \"218ad7b5-e7ca-4f1a-b863-1f160424b195\") " pod="openstack-operators/glance-operator-controller-manager-7b549fc966-427tw" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.038466 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rhrg\" (UniqueName: \"kubernetes.io/projected/c317719a-c16c-4221-9fa4-029bc0d7a004-kube-api-access-5rhrg\") pod \"manila-operator-controller-manager-598945d5b8-869w7\" (UID: \"c317719a-c16c-4221-9fa4-029bc0d7a004\") " pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.038493 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5fbz\" (UniqueName: \"kubernetes.io/projected/04d05f95-12aa-4a8b-9f4c-721247547b88-kube-api-access-n5fbz\") pod \"ironic-operator-controller-manager-f99f54bc8-j5hvw\" (UID: \"04d05f95-12aa-4a8b-9f4c-721247547b88\") " pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.038528 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6fsp\" (UniqueName: \"kubernetes.io/projected/549e2841-20b6-4018-85f2-0bc091560658-kube-api-access-d6fsp\") pod \"heat-operator-controller-manager-658dd65b86-fd9hs\" (UID: \"549e2841-20b6-4018-85f2-0bc091560658\") " pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.038547 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert\") pod \"infra-operator-controller-manager-6d99759cf-mwxxf\" (UID: \"c04604e1-db01-4451-8a27-e439e8f5a94c\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.038588 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6n4z\" (UniqueName: \"kubernetes.io/projected/c04604e1-db01-4451-8a27-e439e8f5a94c-kube-api-access-k6n4z\") pod \"infra-operator-controller-manager-6d99759cf-mwxxf\" (UID: \"c04604e1-db01-4451-8a27-e439e8f5a94c\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.055379 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj"] Jan 04 12:05:26 crc kubenswrapper[5003]: 
I0104 12:05:26.056766 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.062472 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-n7dzr" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.066946 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx76v\" (UniqueName: \"kubernetes.io/projected/c8ead0af-3ab9-4f13-b94f-83b8b48c60b7-kube-api-access-xx76v\") pod \"barbican-operator-controller-manager-f6f74d6db-89lgv\" (UID: \"c8ead0af-3ab9-4f13-b94f-83b8b48c60b7\") " pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.081044 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jgt4\" (UniqueName: \"kubernetes.io/projected/218ad7b5-e7ca-4f1a-b863-1f160424b195-kube-api-access-4jgt4\") pod \"glance-operator-controller-manager-7b549fc966-427tw\" (UID: \"218ad7b5-e7ca-4f1a-b863-1f160424b195\") " pod="openstack-operators/glance-operator-controller-manager-7b549fc966-427tw" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.101700 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6fsp\" (UniqueName: \"kubernetes.io/projected/549e2841-20b6-4018-85f2-0bc091560658-kube-api-access-d6fsp\") pod \"heat-operator-controller-manager-658dd65b86-fd9hs\" (UID: \"549e2841-20b6-4018-85f2-0bc091560658\") " pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.103676 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4qrf\" (UniqueName: \"kubernetes.io/projected/957cb0f0-d8f1-43a9-8d05-0b7db926c066-kube-api-access-k4qrf\") pod \"horizon-operator-controller-manager-7f5ddd8d7b-nbbqt\" (UID: \"957cb0f0-d8f1-43a9-8d05-0b7db926c066\") " pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.119335 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.119655 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-427tw" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.142129 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert\") pod \"infra-operator-controller-manager-6d99759cf-mwxxf\" (UID: \"c04604e1-db01-4451-8a27-e439e8f5a94c\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.142212 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6n4z\" (UniqueName: \"kubernetes.io/projected/c04604e1-db01-4451-8a27-e439e8f5a94c-kube-api-access-k6n4z\") pod \"infra-operator-controller-manager-6d99759cf-mwxxf\" (UID: \"c04604e1-db01-4451-8a27-e439e8f5a94c\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.142247 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzt5t\" (UniqueName: \"kubernetes.io/projected/bc716961-01bb-4e23-a58e-f44f81d91bee-kube-api-access-pzt5t\") pod \"nova-operator-controller-manager-5fbbf8b6cc-h7zz8\" (UID: \"bc716961-01bb-4e23-a58e-f44f81d91bee\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.142269 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llr4z\" (UniqueName: \"kubernetes.io/projected/ef8d1d25-5343-4471-80aa-df8739a0f5d6-kube-api-access-llr4z\") pod \"neutron-operator-controller-manager-7cd87b778f-n2549\" (UID: \"ef8d1d25-5343-4471-80aa-df8739a0f5d6\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.142285 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64lnc\" (UniqueName: \"kubernetes.io/projected/157189ac-3baf-49a7-b37b-dacddf4f43af-kube-api-access-64lnc\") pod \"keystone-operator-controller-manager-568985c78-94d29\" (UID: \"157189ac-3baf-49a7-b37b-dacddf4f43af\") " pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.142311 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgxtz\" (UniqueName: \"kubernetes.io/projected/b1d992ec-f4ef-4925-a225-6407ca5cea0a-kube-api-access-kgxtz\") pod \"mariadb-operator-controller-manager-7b88bfc995-c45pv\" (UID: \"b1d992ec-f4ef-4925-a225-6407ca5cea0a\") " pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.142338 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g95fb\" (UniqueName: \"kubernetes.io/projected/25453bdc-8892-48d4-aca5-cb9549e9e59d-kube-api-access-g95fb\") pod \"octavia-operator-controller-manager-68c649d9d-2kxcj\" (UID: \"25453bdc-8892-48d4-aca5-cb9549e9e59d\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.142361 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rhrg\" (UniqueName: 
\"kubernetes.io/projected/c317719a-c16c-4221-9fa4-029bc0d7a004-kube-api-access-5rhrg\") pod \"manila-operator-controller-manager-598945d5b8-869w7\" (UID: \"c317719a-c16c-4221-9fa4-029bc0d7a004\") " pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.142391 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5fbz\" (UniqueName: \"kubernetes.io/projected/04d05f95-12aa-4a8b-9f4c-721247547b88-kube-api-access-n5fbz\") pod \"ironic-operator-controller-manager-f99f54bc8-j5hvw\" (UID: \"04d05f95-12aa-4a8b-9f4c-721247547b88\") " pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw" Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.142814 5003 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.142867 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert podName:c04604e1-db01-4451-8a27-e439e8f5a94c nodeName:}" failed. No retries permitted until 2026-01-04 12:05:26.64285096 +0000 UTC m=+1042.115880801 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert") pod "infra-operator-controller-manager-6d99759cf-mwxxf" (UID: "c04604e1-db01-4451-8a27-e439e8f5a94c") : secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.143046 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.156752 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.158065 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.161575 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-dfgqh" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.161800 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.173100 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6n4z\" (UniqueName: \"kubernetes.io/projected/c04604e1-db01-4451-8a27-e439e8f5a94c-kube-api-access-k6n4z\") pod \"infra-operator-controller-manager-6d99759cf-mwxxf\" (UID: \"c04604e1-db01-4451-8a27-e439e8f5a94c\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.173577 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.177476 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64lnc\" (UniqueName: \"kubernetes.io/projected/157189ac-3baf-49a7-b37b-dacddf4f43af-kube-api-access-64lnc\") pod \"keystone-operator-controller-manager-568985c78-94d29\" (UID: \"157189ac-3baf-49a7-b37b-dacddf4f43af\") " pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.179694 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5fbz\" (UniqueName: \"kubernetes.io/projected/04d05f95-12aa-4a8b-9f4c-721247547b88-kube-api-access-n5fbz\") pod \"ironic-operator-controller-manager-f99f54bc8-j5hvw\" (UID: \"04d05f95-12aa-4a8b-9f4c-721247547b88\") " pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.183953 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.193485 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rhrg\" (UniqueName: \"kubernetes.io/projected/c317719a-c16c-4221-9fa4-029bc0d7a004-kube-api-access-5rhrg\") pod \"manila-operator-controller-manager-598945d5b8-869w7\" (UID: \"c317719a-c16c-4221-9fa4-029bc0d7a004\") " pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.210554 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.211522 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.223437 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-c8xnf" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.224300 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.245041 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8crpb\" (UniqueName: \"kubernetes.io/projected/682c6705-26e9-4d83-aaa8-48fa906104dc-kube-api-access-8crpb\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.245365 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.245480 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzt5t\" (UniqueName: \"kubernetes.io/projected/bc716961-01bb-4e23-a58e-f44f81d91bee-kube-api-access-pzt5t\") pod \"nova-operator-controller-manager-5fbbf8b6cc-h7zz8\" (UID: \"bc716961-01bb-4e23-a58e-f44f81d91bee\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.245644 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llr4z\" (UniqueName: \"kubernetes.io/projected/ef8d1d25-5343-4471-80aa-df8739a0f5d6-kube-api-access-llr4z\") pod \"neutron-operator-controller-manager-7cd87b778f-n2549\" (UID: \"ef8d1d25-5343-4471-80aa-df8739a0f5d6\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.245733 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgxtz\" (UniqueName: \"kubernetes.io/projected/b1d992ec-f4ef-4925-a225-6407ca5cea0a-kube-api-access-kgxtz\") pod \"mariadb-operator-controller-manager-7b88bfc995-c45pv\" (UID: \"b1d992ec-f4ef-4925-a225-6407ca5cea0a\") " pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.245821 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g95fb\" (UniqueName: \"kubernetes.io/projected/25453bdc-8892-48d4-aca5-cb9549e9e59d-kube-api-access-g95fb\") pod \"octavia-operator-controller-manager-68c649d9d-2kxcj\" (UID: \"25453bdc-8892-48d4-aca5-cb9549e9e59d\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.255187 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.263331 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.297608 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.311653 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.312801 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.313342 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.342121 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.344513 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.347367 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8crpb\" (UniqueName: \"kubernetes.io/projected/682c6705-26e9-4d83-aaa8-48fa906104dc-kube-api-access-8crpb\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.347420 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.347503 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kmtq\" (UniqueName: \"kubernetes.io/projected/b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f-kube-api-access-5kmtq\") pod \"ovn-operator-controller-manager-bf6d4f946-m2pkz\" (UID: \"b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.347879 5003 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.347927 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert podName:682c6705-26e9-4d83-aaa8-48fa906104dc nodeName:}" failed. No retries permitted until 2026-01-04 12:05:26.847908091 +0000 UTC m=+1042.320937932 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert") pod "openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" (UID: "682c6705-26e9-4d83-aaa8-48fa906104dc") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.358403 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g95fb\" (UniqueName: \"kubernetes.io/projected/25453bdc-8892-48d4-aca5-cb9549e9e59d-kube-api-access-g95fb\") pod \"octavia-operator-controller-manager-68c649d9d-2kxcj\" (UID: \"25453bdc-8892-48d4-aca5-cb9549e9e59d\") " pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.386239 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzt5t\" (UniqueName: \"kubernetes.io/projected/bc716961-01bb-4e23-a58e-f44f81d91bee-kube-api-access-pzt5t\") pod \"nova-operator-controller-manager-5fbbf8b6cc-h7zz8\" (UID: \"bc716961-01bb-4e23-a58e-f44f81d91bee\") " pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.388273 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llr4z\" (UniqueName: \"kubernetes.io/projected/ef8d1d25-5343-4471-80aa-df8739a0f5d6-kube-api-access-llr4z\") pod \"neutron-operator-controller-manager-7cd87b778f-n2549\" (UID: \"ef8d1d25-5343-4471-80aa-df8739a0f5d6\") " pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.395275 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-lx922" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.395590 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.397294 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.398057 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgxtz\" (UniqueName: \"kubernetes.io/projected/b1d992ec-f4ef-4925-a225-6407ca5cea0a-kube-api-access-kgxtz\") pod \"mariadb-operator-controller-manager-7b88bfc995-c45pv\" (UID: \"b1d992ec-f4ef-4925-a225-6407ca5cea0a\") " pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.409447 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8crpb\" (UniqueName: \"kubernetes.io/projected/682c6705-26e9-4d83-aaa8-48fa906104dc-kube-api-access-8crpb\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.411112 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.429151 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.431498 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-qz9lb" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.449284 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjklz\" (UniqueName: \"kubernetes.io/projected/a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a-kube-api-access-fjklz\") pod \"swift-operator-controller-manager-bb586bbf4-jb94x\" (UID: \"a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a\") " pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.449339 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qrj6\" (UniqueName: \"kubernetes.io/projected/033a9e41-3e14-4a99-9e2c-9ad9151b8cea-kube-api-access-6qrj6\") pod \"placement-operator-controller-manager-9b6f8f78c-pfsq5\" (UID: \"033a9e41-3e14-4a99-9e2c-9ad9151b8cea\") " pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.449387 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kmtq\" (UniqueName: \"kubernetes.io/projected/b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f-kube-api-access-5kmtq\") pod \"ovn-operator-controller-manager-bf6d4f946-m2pkz\" (UID: \"b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.454727 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.455928 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.460321 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-jgcn9" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.460822 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.464784 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.465880 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.466835 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.470508 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kmtq\" (UniqueName: \"kubernetes.io/projected/b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f-kube-api-access-5kmtq\") pod \"ovn-operator-controller-manager-bf6d4f946-m2pkz\" (UID: \"b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f\") " pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.470897 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-9l2tr" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.493921 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.494897 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.496485 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-k272t" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.508407 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.525405 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.537555 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.537593 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.542523 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.551023 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjklz\" (UniqueName: \"kubernetes.io/projected/a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a-kube-api-access-fjklz\") pod \"swift-operator-controller-manager-bb586bbf4-jb94x\" (UID: \"a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a\") " pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.551075 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qrj6\" (UniqueName: \"kubernetes.io/projected/033a9e41-3e14-4a99-9e2c-9ad9151b8cea-kube-api-access-6qrj6\") pod \"placement-operator-controller-manager-9b6f8f78c-pfsq5\" (UID: \"033a9e41-3e14-4a99-9e2c-9ad9151b8cea\") " pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.551116 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7d4d\" (UniqueName: \"kubernetes.io/projected/6d6813e0-7085-4a6e-af8a-7acb60007841-kube-api-access-b7d4d\") pod \"test-operator-controller-manager-6c866cfdcb-d4nbt\" (UID: \"6d6813e0-7085-4a6e-af8a-7acb60007841\") " pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.551168 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5hvt\" (UniqueName: \"kubernetes.io/projected/e9ebf167-b161-4984-9c2b-caeee988e697-kube-api-access-k5hvt\") pod \"telemetry-operator-controller-manager-68d988df55-vkj55\" (UID: \"e9ebf167-b161-4984-9c2b-caeee988e697\") " pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.551192 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fqxg\" (UniqueName: \"kubernetes.io/projected/03fd52b2-dba9-4298-b44b-3ef8c22a4237-kube-api-access-4fqxg\") pod \"watcher-operator-controller-manager-9dbdf6486-4trlm\" (UID: \"03fd52b2-dba9-4298-b44b-3ef8c22a4237\") " pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.578670 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.580344 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.591069 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.591525 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjklz\" (UniqueName: \"kubernetes.io/projected/a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a-kube-api-access-fjklz\") pod \"swift-operator-controller-manager-bb586bbf4-jb94x\" (UID: \"a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a\") " pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.591627 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qrj6\" (UniqueName: \"kubernetes.io/projected/033a9e41-3e14-4a99-9e2c-9ad9151b8cea-kube-api-access-6qrj6\") pod \"placement-operator-controller-manager-9b6f8f78c-pfsq5\" (UID: \"033a9e41-3e14-4a99-9e2c-9ad9151b8cea\") " pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.618147 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.619372 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf"] Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.619457 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.628351 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.628753 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-7k7vp" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.635346 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.635649 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-9fw5v" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.648366 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.651646 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5hvt\" (UniqueName: \"kubernetes.io/projected/e9ebf167-b161-4984-9c2b-caeee988e697-kube-api-access-k5hvt\") pod \"telemetry-operator-controller-manager-68d988df55-vkj55\" (UID: \"e9ebf167-b161-4984-9c2b-caeee988e697\") " pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.651687 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fqxg\" (UniqueName: \"kubernetes.io/projected/03fd52b2-dba9-4298-b44b-3ef8c22a4237-kube-api-access-4fqxg\") pod \"watcher-operator-controller-manager-9dbdf6486-4trlm\" (UID: \"03fd52b2-dba9-4298-b44b-3ef8c22a4237\") " pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.651730 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert\") pod \"infra-operator-controller-manager-6d99759cf-mwxxf\" (UID: \"c04604e1-db01-4451-8a27-e439e8f5a94c\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.651776 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.651798 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7d4d\" (UniqueName: \"kubernetes.io/projected/6d6813e0-7085-4a6e-af8a-7acb60007841-kube-api-access-b7d4d\") pod \"test-operator-controller-manager-6c866cfdcb-d4nbt\" (UID: \"6d6813e0-7085-4a6e-af8a-7acb60007841\") " pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.651819 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw2lm\" (UniqueName: \"kubernetes.io/projected/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-kube-api-access-rw2lm\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.651847 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rclwt\" (UniqueName: \"kubernetes.io/projected/e798bc76-3f22-4bf8-b337-bf3bf03ca3b2-kube-api-access-rclwt\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6jlbf\" (UID: \"e798bc76-3f22-4bf8-b337-bf3bf03ca3b2\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.651866 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.652331 5003 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.652374 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert podName:c04604e1-db01-4451-8a27-e439e8f5a94c nodeName:}" failed. No retries permitted until 2026-01-04 12:05:27.652359123 +0000 UTC m=+1043.125388964 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert") pod "infra-operator-controller-manager-6d99759cf-mwxxf" (UID: "c04604e1-db01-4451-8a27-e439e8f5a94c") : secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.702684 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7d4d\" (UniqueName: \"kubernetes.io/projected/6d6813e0-7085-4a6e-af8a-7acb60007841-kube-api-access-b7d4d\") pod \"test-operator-controller-manager-6c866cfdcb-d4nbt\" (UID: \"6d6813e0-7085-4a6e-af8a-7acb60007841\") " pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.703261 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fqxg\" (UniqueName: \"kubernetes.io/projected/03fd52b2-dba9-4298-b44b-3ef8c22a4237-kube-api-access-4fqxg\") pod \"watcher-operator-controller-manager-9dbdf6486-4trlm\" (UID: \"03fd52b2-dba9-4298-b44b-3ef8c22a4237\") " pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.704136 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5hvt\" (UniqueName: \"kubernetes.io/projected/e9ebf167-b161-4984-9c2b-caeee988e697-kube-api-access-k5hvt\") pod \"telemetry-operator-controller-manager-68d988df55-vkj55\" (UID: \"e9ebf167-b161-4984-9c2b-caeee988e697\") " pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.742305 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.754501 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.754533 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw2lm\" (UniqueName: \"kubernetes.io/projected/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-kube-api-access-rw2lm\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.754566 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rclwt\" (UniqueName: \"kubernetes.io/projected/e798bc76-3f22-4bf8-b337-bf3bf03ca3b2-kube-api-access-rclwt\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6jlbf\" (UID: \"e798bc76-3f22-4bf8-b337-bf3bf03ca3b2\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.754586 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.754733 5003 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.754775 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs podName:a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:27.254761075 +0000 UTC m=+1042.727790916 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs") pod "openstack-operator-controller-manager-7df7568dd6-flxhv" (UID: "a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14") : secret "metrics-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.755003 5003 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.755042 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs podName:a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:27.255035032 +0000 UTC m=+1042.728064873 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs") pod "openstack-operator-controller-manager-7df7568dd6-flxhv" (UID: "a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14") : secret "webhook-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.770389 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.803430 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw2lm\" (UniqueName: \"kubernetes.io/projected/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-kube-api-access-rw2lm\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.804331 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rclwt\" (UniqueName: \"kubernetes.io/projected/e798bc76-3f22-4bf8-b337-bf3bf03ca3b2-kube-api-access-rclwt\") pod \"rabbitmq-cluster-operator-manager-668c99d594-6jlbf\" (UID: \"e798bc76-3f22-4bf8-b337-bf3bf03ca3b2\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.809417 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.840240 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.963282 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.963535 5003 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: E0104 12:05:26.963633 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert podName:682c6705-26e9-4d83-aaa8-48fa906104dc nodeName:}" failed. No retries permitted until 2026-01-04 12:05:27.963605736 +0000 UTC m=+1043.436635577 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert") pod "openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" (UID: "682c6705-26e9-4d83-aaa8-48fa906104dc") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:26 crc kubenswrapper[5003]: I0104 12:05:26.965000 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" Jan 04 12:05:27 crc kubenswrapper[5003]: I0104 12:05:27.104824 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf" Jan 04 12:05:27 crc kubenswrapper[5003]: I0104 12:05:27.281718 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:27 crc kubenswrapper[5003]: I0104 12:05:27.281806 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:27 crc kubenswrapper[5003]: E0104 12:05:27.282057 5003 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 04 12:05:27 crc kubenswrapper[5003]: E0104 12:05:27.282081 5003 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 04 12:05:27 crc kubenswrapper[5003]: E0104 12:05:27.282129 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs podName:a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:28.282111989 +0000 UTC m=+1043.755141830 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs") pod "openstack-operator-controller-manager-7df7568dd6-flxhv" (UID: "a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14") : secret "metrics-server-cert" not found Jan 04 12:05:27 crc kubenswrapper[5003]: E0104 12:05:27.282157 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs podName:a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:28.282138449 +0000 UTC m=+1043.755168290 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs") pod "openstack-operator-controller-manager-7df7568dd6-flxhv" (UID: "a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14") : secret "webhook-server-cert" not found Jan 04 12:05:27 crc kubenswrapper[5003]: I0104 12:05:27.678262 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert\") pod \"infra-operator-controller-manager-6d99759cf-mwxxf\" (UID: \"c04604e1-db01-4451-8a27-e439e8f5a94c\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:27 crc kubenswrapper[5003]: E0104 12:05:27.678499 5003 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:27 crc kubenswrapper[5003]: E0104 12:05:27.679039 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert podName:c04604e1-db01-4451-8a27-e439e8f5a94c nodeName:}" failed. No retries permitted until 2026-01-04 12:05:29.678995402 +0000 UTC m=+1045.152025243 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert") pod "infra-operator-controller-manager-6d99759cf-mwxxf" (UID: "c04604e1-db01-4451-8a27-e439e8f5a94c") : secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:28 crc kubenswrapper[5003]: I0104 12:05:28.029816 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:28 crc kubenswrapper[5003]: E0104 12:05:28.030107 5003 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:28 crc kubenswrapper[5003]: E0104 12:05:28.030161 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert podName:682c6705-26e9-4d83-aaa8-48fa906104dc nodeName:}" failed. No retries permitted until 2026-01-04 12:05:30.030146784 +0000 UTC m=+1045.503176625 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert") pod "openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" (UID: "682c6705-26e9-4d83-aaa8-48fa906104dc") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:28 crc kubenswrapper[5003]: I0104 12:05:28.334296 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:28 crc kubenswrapper[5003]: I0104 12:05:28.334479 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:28 crc kubenswrapper[5003]: E0104 12:05:28.334713 5003 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 04 12:05:28 crc kubenswrapper[5003]: E0104 12:05:28.334798 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs podName:a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:30.334777712 +0000 UTC m=+1045.807807563 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs") pod "openstack-operator-controller-manager-7df7568dd6-flxhv" (UID: "a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14") : secret "webhook-server-cert" not found Jan 04 12:05:28 crc kubenswrapper[5003]: E0104 12:05:28.335459 5003 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 04 12:05:28 crc kubenswrapper[5003]: E0104 12:05:28.335606 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs podName:a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:30.335577523 +0000 UTC m=+1045.808607364 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs") pod "openstack-operator-controller-manager-7df7568dd6-flxhv" (UID: "a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14") : secret "metrics-server-cert" not found Jan 04 12:05:28 crc kubenswrapper[5003]: I0104 12:05:28.457097 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r"] Jan 04 12:05:28 crc kubenswrapper[5003]: I0104 12:05:28.478391 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-7b549fc966-427tw"] Jan 04 12:05:28 crc kubenswrapper[5003]: I0104 12:05:28.725088 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs"] Jan 04 12:05:28 crc kubenswrapper[5003]: I0104 12:05:28.740175 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw"] Jan 04 12:05:28 crc kubenswrapper[5003]: I0104 12:05:28.747529 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-568985c78-94d29"] Jan 04 12:05:28 crc kubenswrapper[5003]: W0104 12:05:28.800960 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod549e2841_20b6_4018_85f2_0bc091560658.slice/crio-c0bef954587ea59920cc170bc9704a4f9fbf1ccbeaf74cfdc65ada85bc40baf9 WatchSource:0}: Error finding container c0bef954587ea59920cc170bc9704a4f9fbf1ccbeaf74cfdc65ada85bc40baf9: Status 404 returned error can't find the container with id c0bef954587ea59920cc170bc9704a4f9fbf1ccbeaf74cfdc65ada85bc40baf9 Jan 04 12:05:28 crc kubenswrapper[5003]: W0104 12:05:28.831248 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d05f95_12aa_4a8b_9f4c_721247547b88.slice/crio-20947701307ce0860323338e1dadf166690df0b64c92bc473be4e4a241ed03f6 WatchSource:0}: Error finding container 20947701307ce0860323338e1dadf166690df0b64c92bc473be4e4a241ed03f6: Status 404 returned error can't find the container with id 20947701307ce0860323338e1dadf166690df0b64c92bc473be4e4a241ed03f6 Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.112129 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw" event={"ID":"04d05f95-12aa-4a8b-9f4c-721247547b88","Type":"ContainerStarted","Data":"20947701307ce0860323338e1dadf166690df0b64c92bc473be4e4a241ed03f6"} Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.113247 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" event={"ID":"549e2841-20b6-4018-85f2-0bc091560658","Type":"ContainerStarted","Data":"c0bef954587ea59920cc170bc9704a4f9fbf1ccbeaf74cfdc65ada85bc40baf9"} Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.114438 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-427tw" event={"ID":"218ad7b5-e7ca-4f1a-b863-1f160424b195","Type":"ContainerStarted","Data":"257e4aae0219155463098552a18756d24a52a7d284d2385f7bd6c6ab3bded0fa"} Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.115735 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29" event={"ID":"157189ac-3baf-49a7-b37b-dacddf4f43af","Type":"ContainerStarted","Data":"11c3b631ec4bc3cfecf3831cfdc775bddba2f30239af97b578d4fe354df7e3d4"} Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.118776 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r" event={"ID":"916a829e-a1f9-4c4c-9253-b5d1b901f2f3","Type":"ContainerStarted","Data":"499eeb279487a92b3a40954536c6024935a8db89a020b89d753939025c589ee4"} Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.193265 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f"] Jan 04 12:05:29 crc kubenswrapper[5003]: W0104 12:05:29.204657 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c9bc809_322b_4079_a1ea_533ce9239181.slice/crio-0cd600a7e0d6518666f01c8011238cd1539caf37485f9427cc5fc95b107a3b22 WatchSource:0}: Error finding container 0cd600a7e0d6518666f01c8011238cd1539caf37485f9427cc5fc95b107a3b22: Status 404 returned error can't find the container with id 0cd600a7e0d6518666f01c8011238cd1539caf37485f9427cc5fc95b107a3b22 Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.232507 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt"] Jan 04 12:05:29 crc kubenswrapper[5003]: W0104 12:05:29.233455 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod957cb0f0_d8f1_43a9_8d05_0b7db926c066.slice/crio-ee421d7c2de16aa750429f3194f11d9662404616b67dc0207973a5c63b98eec9 WatchSource:0}: Error finding container ee421d7c2de16aa750429f3194f11d9662404616b67dc0207973a5c63b98eec9: Status 404 returned error can't find the container with id ee421d7c2de16aa750429f3194f11d9662404616b67dc0207973a5c63b98eec9 Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.262239 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x"] Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.270881 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz"] Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.277147 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv"] Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.285998 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf"] Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.293663 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8"] Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.440196 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt"] Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.451556 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549"] Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.463766 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj"] Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.486659 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv"] Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.502738 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-598945d5b8-869w7"] Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.509847 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5"] Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.516004 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55"] Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.518200 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-llr4z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-7cd87b778f-n2549_openstack-operators(ef8d1d25-5343-4471-80aa-df8739a0f5d6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:29 crc kubenswrapper[5003]: W0104 12:05:29.518478 5003 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc317719a_c16c_4221_9fa4_029bc0d7a004.slice/crio-1651ba83a0787989583dc67fdae107c285fdce291f6322937f00bf4b81b2d938 WatchSource:0}: Error finding container 1651ba83a0787989583dc67fdae107c285fdce291f6322937f00bf4b81b2d938: Status 404 returned error can't find the container with id 1651ba83a0787989583dc67fdae107c285fdce291f6322937f00bf4b81b2d938 Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.519524 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" podUID="ef8d1d25-5343-4471-80aa-df8739a0f5d6" Jan 04 12:05:29 crc kubenswrapper[5003]: W0104 12:05:29.521998 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8ead0af_3ab9_4f13_b94f_83b8b48c60b7.slice/crio-db4c0c598bcb577d96f9fe91321112df196e00f7aae85abf8bbcf778091bd7a9 WatchSource:0}: Error finding container db4c0c598bcb577d96f9fe91321112df196e00f7aae85abf8bbcf778091bd7a9: Status 404 returned error can't find the container with id db4c0c598bcb577d96f9fe91321112df196e00f7aae85abf8bbcf778091bd7a9 Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.523504 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:c846ab4a49272557884db6b976f979e6b9dce1aa73e5eb7872b4472f44602a1c,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5rhrg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-598945d5b8-869w7_openstack-operators(c317719a-c16c-4221-9fa4-029bc0d7a004): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.524916 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" podUID="c317719a-c16c-4221-9fa4-029bc0d7a004" Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.526239 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:1b684c4ca525a279deee45980140d895e264526c5c7e0a6981d6fae6cbcaa420,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6qrj6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-9b6f8f78c-pfsq5_openstack-operators(033a9e41-3e14-4a99-9e2c-9ad9151b8cea): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.528186 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" podUID="033a9e41-3e14-4a99-9e2c-9ad9151b8cea" Jan 04 12:05:29 crc kubenswrapper[5003]: W0104 12:05:29.529493 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9ebf167_b161_4984_9c2b_caeee988e697.slice/crio-2b7cf3505f2e9f341ef6df9aae3f9110472d69b3fe2348f6d9b3861b306e26d2 WatchSource:0}: Error finding container 2b7cf3505f2e9f341ef6df9aae3f9110472d69b3fe2348f6d9b3861b306e26d2: Status 404 returned error can't find the container with id 2b7cf3505f2e9f341ef6df9aae3f9110472d69b3fe2348f6d9b3861b306e26d2 Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.532054 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:3c1b2858c64110448d801905fbbf3ffe7f78d264cc46ab12ab2d724842dba309,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-k5hvt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-68d988df55-vkj55_openstack-operators(e9ebf167-b161-4984-9c2b-caeee988e697): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.532931 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm"] Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.534498 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" podUID="e9ebf167-b161-4984-9c2b-caeee988e697" Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.535075 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:afb66a0f8e1aa057888f7c304cc34cfea711805d9d1f05798aceb4029fef2989,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xx76v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-f6f74d6db-89lgv_openstack-operators(c8ead0af-3ab9-4f13-b94f-83b8b48c60b7): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.537752 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" podUID="c8ead0af-3ab9-4f13-b94f-83b8b48c60b7" Jan 04 12:05:29 crc kubenswrapper[5003]: W0104 12:05:29.538429 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25453bdc_8892_48d4_aca5_cb9549e9e59d.slice/crio-561b3dcbd769966bd2e4c6da841d4d1e524d7da829fda8d22e8e123ee975ce8f WatchSource:0}: Error finding container 561b3dcbd769966bd2e4c6da841d4d1e524d7da829fda8d22e8e123ee975ce8f: Status 404 returned error can't find the container with id 561b3dcbd769966bd2e4c6da841d4d1e524d7da829fda8d22e8e123ee975ce8f Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.542485 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g95fb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-68c649d9d-2kxcj_openstack-operators(25453bdc-8892-48d4-aca5-cb9549e9e59d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.544278 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" podUID="25453bdc-8892-48d4-aca5-cb9549e9e59d" Jan 04 12:05:29 crc kubenswrapper[5003]: I0104 12:05:29.687029 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert\") pod \"infra-operator-controller-manager-6d99759cf-mwxxf\" (UID: \"c04604e1-db01-4451-8a27-e439e8f5a94c\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.687222 5003 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:29 crc kubenswrapper[5003]: E0104 12:05:29.687321 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert podName:c04604e1-db01-4451-8a27-e439e8f5a94c nodeName:}" failed. No retries permitted until 2026-01-04 12:05:33.687289018 +0000 UTC m=+1049.160318849 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert") pod "infra-operator-controller-manager-6d99759cf-mwxxf" (UID: "c04604e1-db01-4451-8a27-e439e8f5a94c") : secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.092797 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.093056 5003 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.093155 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert podName:682c6705-26e9-4d83-aaa8-48fa906104dc nodeName:}" failed. 
No retries permitted until 2026-01-04 12:05:34.093128147 +0000 UTC m=+1049.566157988 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert") pod "openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" (UID: "682c6705-26e9-4d83-aaa8-48fa906104dc") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.131059 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" event={"ID":"b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f","Type":"ContainerStarted","Data":"51c57c8b755ccb2783c10e18c98fc1bd4a3ca5fc4ece8e98ece8dc2ea828ed9b"} Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.134882 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" event={"ID":"ef8d1d25-5343-4471-80aa-df8739a0f5d6","Type":"ContainerStarted","Data":"bd9a94475d880d61f16a368f8c052a9de7b546b05291d23d23513e499b792cec"} Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.136961 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" podUID="ef8d1d25-5343-4471-80aa-df8739a0f5d6" Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.138135 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" event={"ID":"6d6813e0-7085-4a6e-af8a-7acb60007841","Type":"ContainerStarted","Data":"3300d83320e6a287428302f1eda3f897823d34644c5a81ef7fdaf94bc387cb1f"} Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.140595 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf" event={"ID":"e798bc76-3f22-4bf8-b337-bf3bf03ca3b2","Type":"ContainerStarted","Data":"9a462e2811ff660b1ac195ce9131153e724b6a11fc087c5544862540e8bbf42e"} Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.142631 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" event={"ID":"033a9e41-3e14-4a99-9e2c-9ad9151b8cea","Type":"ContainerStarted","Data":"a4547ac1baffd22abd3a243d53cc758705b90e7f6c3413e28adbef6f1d2d032e"} Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.145166 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" event={"ID":"e9ebf167-b161-4984-9c2b-caeee988e697","Type":"ContainerStarted","Data":"2b7cf3505f2e9f341ef6df9aae3f9110472d69b3fe2348f6d9b3861b306e26d2"} Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.145815 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:1b684c4ca525a279deee45980140d895e264526c5c7e0a6981d6fae6cbcaa420\\\"\"" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" podUID="033a9e41-3e14-4a99-9e2c-9ad9151b8cea" Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.147053 5003 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:3c1b2858c64110448d801905fbbf3ffe7f78d264cc46ab12ab2d724842dba309\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" podUID="e9ebf167-b161-4984-9c2b-caeee988e697" Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.158408 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" event={"ID":"03fd52b2-dba9-4298-b44b-3ef8c22a4237","Type":"ContainerStarted","Data":"44c880d34f73c99108d3ee919b5e32d7dafecc08f2e119fa6e2b0859dfbf47d2"} Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.164329 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" event={"ID":"25453bdc-8892-48d4-aca5-cb9549e9e59d","Type":"ContainerStarted","Data":"561b3dcbd769966bd2e4c6da841d4d1e524d7da829fda8d22e8e123ee975ce8f"} Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.166006 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" podUID="25453bdc-8892-48d4-aca5-cb9549e9e59d" Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.167768 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" event={"ID":"b1d992ec-f4ef-4925-a225-6407ca5cea0a","Type":"ContainerStarted","Data":"a1b3c1031d2cf61ee23d29d93c34ad291300c3c3891f3461716a5bbae7978e42"} Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.183361 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" event={"ID":"a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a","Type":"ContainerStarted","Data":"7c94209556cd0267b02bca8f49ad227c37bf18462ec1f8fce399260877437d1a"} Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.185750 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" event={"ID":"8c9bc809-322b-4079-a1ea-533ce9239181","Type":"ContainerStarted","Data":"0cd600a7e0d6518666f01c8011238cd1539caf37485f9427cc5fc95b107a3b22"} Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.187071 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" event={"ID":"c8ead0af-3ab9-4f13-b94f-83b8b48c60b7","Type":"ContainerStarted","Data":"db4c0c598bcb577d96f9fe91321112df196e00f7aae85abf8bbcf778091bd7a9"} Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.188623 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:afb66a0f8e1aa057888f7c304cc34cfea711805d9d1f05798aceb4029fef2989\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" podUID="c8ead0af-3ab9-4f13-b94f-83b8b48c60b7" Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.193871 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" event={"ID":"c317719a-c16c-4221-9fa4-029bc0d7a004","Type":"ContainerStarted","Data":"1651ba83a0787989583dc67fdae107c285fdce291f6322937f00bf4b81b2d938"} Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.198797 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:c846ab4a49272557884db6b976f979e6b9dce1aa73e5eb7872b4472f44602a1c\\\"\"" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" podUID="c317719a-c16c-4221-9fa4-029bc0d7a004" Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.199294 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt" event={"ID":"957cb0f0-d8f1-43a9-8d05-0b7db926c066","Type":"ContainerStarted","Data":"ee421d7c2de16aa750429f3194f11d9662404616b67dc0207973a5c63b98eec9"} Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.200908 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8" event={"ID":"bc716961-01bb-4e23-a58e-f44f81d91bee","Type":"ContainerStarted","Data":"d0281db55fc488465990b81abaa7ac85eb54a2869c1402f53482359db5f90839"} Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.408907 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:30 crc kubenswrapper[5003]: I0104 12:05:30.409376 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.409148 5003 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.409533 5003 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.409612 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs podName:a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:34.409577325 +0000 UTC m=+1049.882607156 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs") pod "openstack-operator-controller-manager-7df7568dd6-flxhv" (UID: "a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14") : secret "webhook-server-cert" not found Jan 04 12:05:30 crc kubenswrapper[5003]: E0104 12:05:30.409663 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs podName:a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:34.409650547 +0000 UTC m=+1049.882680388 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs") pod "openstack-operator-controller-manager-7df7568dd6-flxhv" (UID: "a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14") : secret "metrics-server-cert" not found Jan 04 12:05:31 crc kubenswrapper[5003]: E0104 12:05:31.228953 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:1b684c4ca525a279deee45980140d895e264526c5c7e0a6981d6fae6cbcaa420\\\"\"" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" podUID="033a9e41-3e14-4a99-9e2c-9ad9151b8cea" Jan 04 12:05:31 crc kubenswrapper[5003]: E0104 12:05:31.229212 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:c846ab4a49272557884db6b976f979e6b9dce1aa73e5eb7872b4472f44602a1c\\\"\"" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" podUID="c317719a-c16c-4221-9fa4-029bc0d7a004" Jan 04 12:05:31 crc kubenswrapper[5003]: E0104 12:05:31.229254 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" podUID="ef8d1d25-5343-4471-80aa-df8739a0f5d6" Jan 04 12:05:31 crc kubenswrapper[5003]: E0104 12:05:31.229287 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" podUID="25453bdc-8892-48d4-aca5-cb9549e9e59d" Jan 04 12:05:31 crc kubenswrapper[5003]: E0104 12:05:31.229324 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:afb66a0f8e1aa057888f7c304cc34cfea711805d9d1f05798aceb4029fef2989\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" podUID="c8ead0af-3ab9-4f13-b94f-83b8b48c60b7" Jan 04 12:05:31 crc kubenswrapper[5003]: E0104 12:05:31.229357 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:3c1b2858c64110448d801905fbbf3ffe7f78d264cc46ab12ab2d724842dba309\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" podUID="e9ebf167-b161-4984-9c2b-caeee988e697" Jan 04 12:05:33 crc kubenswrapper[5003]: I0104 12:05:33.697284 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert\") pod \"infra-operator-controller-manager-6d99759cf-mwxxf\" (UID: \"c04604e1-db01-4451-8a27-e439e8f5a94c\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:33 crc kubenswrapper[5003]: E0104 12:05:33.697540 5003 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:33 crc kubenswrapper[5003]: E0104 12:05:33.697625 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert podName:c04604e1-db01-4451-8a27-e439e8f5a94c nodeName:}" failed. No retries permitted until 2026-01-04 12:05:41.697601534 +0000 UTC m=+1057.170631395 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert") pod "infra-operator-controller-manager-6d99759cf-mwxxf" (UID: "c04604e1-db01-4451-8a27-e439e8f5a94c") : secret "infra-operator-webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[5003]: I0104 12:05:34.105215 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:34 crc kubenswrapper[5003]: E0104 12:05:34.105489 5003 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[5003]: E0104 12:05:34.106141 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert podName:682c6705-26e9-4d83-aaa8-48fa906104dc nodeName:}" failed. No retries permitted until 2026-01-04 12:05:42.106098552 +0000 UTC m=+1057.579128433 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert") pod "openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" (UID: "682c6705-26e9-4d83-aaa8-48fa906104dc") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[5003]: I0104 12:05:34.411179 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:34 crc kubenswrapper[5003]: I0104 12:05:34.411271 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:34 crc kubenswrapper[5003]: E0104 12:05:34.411393 5003 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[5003]: E0104 12:05:34.411429 5003 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[5003]: E0104 12:05:34.411512 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs podName:a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:42.41148377 +0000 UTC m=+1057.884513651 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs") pod "openstack-operator-controller-manager-7df7568dd6-flxhv" (UID: "a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14") : secret "webhook-server-cert" not found Jan 04 12:05:34 crc kubenswrapper[5003]: E0104 12:05:34.411549 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs podName:a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14 nodeName:}" failed. No retries permitted until 2026-01-04 12:05:42.411529611 +0000 UTC m=+1057.884559482 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs") pod "openstack-operator-controller-manager-7df7568dd6-flxhv" (UID: "a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14") : secret "metrics-server-cert" not found Jan 04 12:05:41 crc kubenswrapper[5003]: E0104 12:05:41.059355 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:573d7dba212cbc32101496a7cbe01e391af9891bed3bec717f16bed4d6c23e04" Jan 04 12:05:41 crc kubenswrapper[5003]: E0104 12:05:41.060252 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:573d7dba212cbc32101496a7cbe01e391af9891bed3bec717f16bed4d6c23e04,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d6fsp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-658dd65b86-fd9hs_openstack-operators(549e2841-20b6-4018-85f2-0bc091560658): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:05:41 crc kubenswrapper[5003]: E0104 12:05:41.061497 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" 
podUID="549e2841-20b6-4018-85f2-0bc091560658" Jan 04 12:05:41 crc kubenswrapper[5003]: E0104 12:05:41.397475 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:573d7dba212cbc32101496a7cbe01e391af9891bed3bec717f16bed4d6c23e04\\\"\"" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" podUID="549e2841-20b6-4018-85f2-0bc091560658" Jan 04 12:05:41 crc kubenswrapper[5003]: I0104 12:05:41.726217 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert\") pod \"infra-operator-controller-manager-6d99759cf-mwxxf\" (UID: \"c04604e1-db01-4451-8a27-e439e8f5a94c\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:41 crc kubenswrapper[5003]: I0104 12:05:41.733768 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c04604e1-db01-4451-8a27-e439e8f5a94c-cert\") pod \"infra-operator-controller-manager-6d99759cf-mwxxf\" (UID: \"c04604e1-db01-4451-8a27-e439e8f5a94c\") " pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:41 crc kubenswrapper[5003]: I0104 12:05:41.785425 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" Jan 04 12:05:41 crc kubenswrapper[5003]: I0104 12:05:41.808571 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 12:05:41 crc kubenswrapper[5003]: E0104 12:05:41.924884 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:df69e4193043476bc71d0e06ac8bc7bbd17f7b624d495aae6b7c5e5b40c9e1e7" Jan 04 12:05:41 crc kubenswrapper[5003]: E0104 12:05:41.925151 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:df69e4193043476bc71d0e06ac8bc7bbd17f7b624d495aae6b7c5e5b40c9e1e7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fjklz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-bb586bbf4-jb94x_openstack-operators(a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:05:41 crc kubenswrapper[5003]: E0104 12:05:41.927051 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" podUID="a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a" Jan 04 12:05:42 crc kubenswrapper[5003]: I0104 12:05:42.147170 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:42 crc kubenswrapper[5003]: E0104 12:05:42.147313 5003 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:42 crc kubenswrapper[5003]: E0104 12:05:42.147459 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert podName:682c6705-26e9-4d83-aaa8-48fa906104dc nodeName:}" failed. No retries permitted until 2026-01-04 12:05:58.147442047 +0000 UTC m=+1073.620471888 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert") pod "openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" (UID: "682c6705-26e9-4d83-aaa8-48fa906104dc") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 04 12:05:42 crc kubenswrapper[5003]: E0104 12:05:42.402992 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:df69e4193043476bc71d0e06ac8bc7bbd17f7b624d495aae6b7c5e5b40c9e1e7\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" podUID="a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a" Jan 04 12:05:42 crc kubenswrapper[5003]: I0104 12:05:42.452659 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:42 crc kubenswrapper[5003]: I0104 12:05:42.452737 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:42 crc kubenswrapper[5003]: I0104 12:05:42.461970 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-webhook-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:42 crc kubenswrapper[5003]: I0104 12:05:42.472086 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14-metrics-certs\") pod \"openstack-operator-controller-manager-7df7568dd6-flxhv\" (UID: \"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14\") " pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:42 crc kubenswrapper[5003]: I0104 12:05:42.500196 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:05:44 crc kubenswrapper[5003]: E0104 12:05:44.484733 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:4e3d234c1398039c2593611f7b0fd2a6b284cafb1563e6737876a265b9af42b6" Jan 04 12:05:44 crc kubenswrapper[5003]: E0104 12:05:44.485349 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:4e3d234c1398039c2593611f7b0fd2a6b284cafb1563e6737876a265b9af42b6,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b7d4d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-6c866cfdcb-d4nbt_openstack-operators(6d6813e0-7085-4a6e-af8a-7acb60007841): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:05:44 crc kubenswrapper[5003]: E0104 12:05:44.486782 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" podUID="6d6813e0-7085-4a6e-af8a-7acb60007841" Jan 04 12:05:45 crc kubenswrapper[5003]: E0104 12:05:45.430585 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:4e3d234c1398039c2593611f7b0fd2a6b284cafb1563e6737876a265b9af42b6\\\"\"" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" podUID="6d6813e0-7085-4a6e-af8a-7acb60007841" Jan 04 12:05:45 crc kubenswrapper[5003]: E0104 12:05:45.710612 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:c10647131e6fa6afeb11ea28e513b60f22dbfbb4ddc3727850b1fe5799890c41" Jan 04 12:05:45 crc kubenswrapper[5003]: E0104 12:05:45.710839 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:c10647131e6fa6afeb11ea28e513b60f22dbfbb4ddc3727850b1fe5799890c41,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kgxtz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-7b88bfc995-c45pv_openstack-operators(b1d992ec-f4ef-4925-a225-6407ca5cea0a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:05:45 crc kubenswrapper[5003]: E0104 12:05:45.712195 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" podUID="b1d992ec-f4ef-4925-a225-6407ca5cea0a" Jan 04 12:05:46 crc kubenswrapper[5003]: E0104 12:05:46.429291 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:c10647131e6fa6afeb11ea28e513b60f22dbfbb4ddc3727850b1fe5799890c41\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" podUID="b1d992ec-f4ef-4925-a225-6407ca5cea0a" Jan 04 12:05:46 crc kubenswrapper[5003]: E0104 12:05:46.722711 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Jan 04 12:05:46 crc kubenswrapper[5003]: E0104 12:05:46.723350 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5kmtq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-bf6d4f946-m2pkz_openstack-operators(b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:05:46 crc kubenswrapper[5003]: E0104 
12:05:46.724908 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" podUID="b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f" Jan 04 12:05:47 crc kubenswrapper[5003]: E0104 12:05:47.464830 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" podUID="b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f" Jan 04 12:05:49 crc kubenswrapper[5003]: E0104 12:05:49.779763 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:900050d3501c0785b227db34b89883efe68247816e5c7427cacb74f8aa10605a" Jan 04 12:05:49 crc kubenswrapper[5003]: E0104 12:05:49.780411 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:900050d3501c0785b227db34b89883efe68247816e5c7427cacb74f8aa10605a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9ztbt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
designate-operator-controller-manager-66f8b87655-5r27f_openstack-operators(8c9bc809-322b-4079-a1ea-533ce9239181): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:05:49 crc kubenswrapper[5003]: E0104 12:05:49.781799 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" podUID="8c9bc809-322b-4079-a1ea-533ce9239181" Jan 04 12:05:50 crc kubenswrapper[5003]: E0104 12:05:50.686167 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:900050d3501c0785b227db34b89883efe68247816e5c7427cacb74f8aa10605a\\\"\"" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" podUID="8c9bc809-322b-4079-a1ea-533ce9239181" Jan 04 12:05:50 crc kubenswrapper[5003]: E0104 12:05:50.935866 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:f0ece9a81e4be3dbc1ff752a951970380546d8c0dea910953f862c219444b97a" Jan 04 12:05:50 crc kubenswrapper[5003]: E0104 12:05:50.936106 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:f0ece9a81e4be3dbc1ff752a951970380546d8c0dea910953f862c219444b97a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4fqxg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-9dbdf6486-4trlm_openstack-operators(03fd52b2-dba9-4298-b44b-3ef8c22a4237): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:05:50 crc kubenswrapper[5003]: E0104 12:05:50.937315 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" podUID="03fd52b2-dba9-4298-b44b-3ef8c22a4237" Jan 04 12:05:51 crc kubenswrapper[5003]: E0104 12:05:51.687201 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:f0ece9a81e4be3dbc1ff752a951970380546d8c0dea910953f862c219444b97a\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" podUID="03fd52b2-dba9-4298-b44b-3ef8c22a4237" Jan 04 12:05:54 crc kubenswrapper[5003]: E0104 12:05:54.697171 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:879d3d679b58ae84419b7907ad092ad4d24bcc9222ce621ce464fd0fea347b0c" Jan 04 12:05:54 crc kubenswrapper[5003]: E0104 12:05:54.698068 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:879d3d679b58ae84419b7907ad092ad4d24bcc9222ce621ce464fd0fea347b0c,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-64lnc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-568985c78-94d29_openstack-operators(157189ac-3baf-49a7-b37b-dacddf4f43af): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:05:54 crc kubenswrapper[5003]: E0104 12:05:54.699414 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29" podUID="157189ac-3baf-49a7-b37b-dacddf4f43af" Jan 04 12:05:54 crc kubenswrapper[5003]: E0104 12:05:54.732477 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:879d3d679b58ae84419b7907ad092ad4d24bcc9222ce621ce464fd0fea347b0c\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29" podUID="157189ac-3baf-49a7-b37b-dacddf4f43af" Jan 04 12:05:55 crc kubenswrapper[5003]: E0104 12:05:55.224279 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Jan 04 12:05:55 crc kubenswrapper[5003]: E0104 12:05:55.224490 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pzt5t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-5fbbf8b6cc-h7zz8_openstack-operators(bc716961-01bb-4e23-a58e-f44f81d91bee): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:05:55 crc kubenswrapper[5003]: E0104 12:05:55.225760 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8" podUID="bc716961-01bb-4e23-a58e-f44f81d91bee" Jan 04 12:05:55 crc kubenswrapper[5003]: E0104 12:05:55.741406 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\"" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8" podUID="bc716961-01bb-4e23-a58e-f44f81d91bee" Jan 04 12:05:58 crc kubenswrapper[5003]: I0104 12:05:58.203512 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:58 crc kubenswrapper[5003]: I0104 12:05:58.217550 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/682c6705-26e9-4d83-aaa8-48fa906104dc-cert\") pod \"openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9\" (UID: \"682c6705-26e9-4d83-aaa8-48fa906104dc\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:58 crc kubenswrapper[5003]: I0104 12:05:58.393836 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" Jan 04 12:05:59 crc kubenswrapper[5003]: E0104 12:05:59.644375 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Jan 04 12:05:59 crc kubenswrapper[5003]: E0104 12:05:59.644988 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rclwt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-6jlbf_openstack-operators(e798bc76-3f22-4bf8-b337-bf3bf03ca3b2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:05:59 crc kubenswrapper[5003]: E0104 12:05:59.646188 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf" podUID="e798bc76-3f22-4bf8-b337-bf3bf03ca3b2" Jan 04 12:05:59 crc kubenswrapper[5003]: E0104 12:05:59.765477 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf" podUID="e798bc76-3f22-4bf8-b337-bf3bf03ca3b2" Jan 04 12:06:01 crc 
kubenswrapper[5003]: E0104 12:06:01.513729 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Jan 04 12:06:01 crc kubenswrapper[5003]: E0104 12:06:01.514445 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g95fb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-68c649d9d-2kxcj_openstack-operators(25453bdc-8892-48d4-aca5-cb9549e9e59d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:06:01 crc kubenswrapper[5003]: E0104 12:06:01.515645 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" podUID="25453bdc-8892-48d4-aca5-cb9549e9e59d" Jan 04 12:06:04 crc kubenswrapper[5003]: E0104 12:06:04.637430 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/openstack-k8s-operators/placement-operator@sha256:1b684c4ca525a279deee45980140d895e264526c5c7e0a6981d6fae6cbcaa420" Jan 04 12:06:04 crc kubenswrapper[5003]: E0104 12:06:04.637995 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:1b684c4ca525a279deee45980140d895e264526c5c7e0a6981d6fae6cbcaa420,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6qrj6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-9b6f8f78c-pfsq5_openstack-operators(033a9e41-3e14-4a99-9e2c-9ad9151b8cea): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:06:04 crc kubenswrapper[5003]: E0104 12:06:04.639161 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" podUID="033a9e41-3e14-4a99-9e2c-9ad9151b8cea" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.132195 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf"] Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.330896 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9"] Jan 04 12:06:05 crc 
kubenswrapper[5003]: W0104 12:06:05.349178 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod682c6705_26e9_4d83_aaa8_48fa906104dc.slice/crio-e3ba66cfdf77cdfb7efe5bce4bda4ce1ac7320b79564394af2c6cf66118efb08 WatchSource:0}: Error finding container e3ba66cfdf77cdfb7efe5bce4bda4ce1ac7320b79564394af2c6cf66118efb08: Status 404 returned error can't find the container with id e3ba66cfdf77cdfb7efe5bce4bda4ce1ac7320b79564394af2c6cf66118efb08 Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.370311 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv"] Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.833290 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" event={"ID":"b1d992ec-f4ef-4925-a225-6407ca5cea0a","Type":"ContainerStarted","Data":"9d364f8767144768c82101e09deec01a8ebc28e8cfb9e081bfb173dbde290de3"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.833942 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.836297 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" event={"ID":"8c9bc809-322b-4079-a1ea-533ce9239181","Type":"ContainerStarted","Data":"3b786bcaf1a52c6e832f37eb08cdb3961549d9cbfe6966975d94db150bb3f86b"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.836639 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.837661 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" event={"ID":"c8ead0af-3ab9-4f13-b94f-83b8b48c60b7","Type":"ContainerStarted","Data":"83c0da37383833199843ba640011f4604a3a132f37135d7bb76e4d300ed20527"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.838013 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.839556 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r" event={"ID":"916a829e-a1f9-4c4c-9253-b5d1b901f2f3","Type":"ContainerStarted","Data":"33bdb24e224fbf8b7acff6b62fbe59f28af30e0ce3d53d0931fedacabb98ba34"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.839943 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.842075 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" event={"ID":"b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f","Type":"ContainerStarted","Data":"999cd69bf59142a31dc7a56f8eaac2ad973ccc6fae228e264ff180f47fb84bb1"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.842419 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" Jan 04 12:06:05 crc kubenswrapper[5003]: 
I0104 12:06:05.850032 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" event={"ID":"a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a","Type":"ContainerStarted","Data":"168ad480cafbf1b5148b513a7d758ff9d1c11f6e495b03d62eb18f1e654bc7fd"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.850607 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.859495 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw" event={"ID":"04d05f95-12aa-4a8b-9f4c-721247547b88","Type":"ContainerStarted","Data":"2dc5eb1f7a40fdc4f30d40a78de7316d63fd5d6e906d0e30b2c30f3cf05c66d0"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.859846 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.864162 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" podStartSLOduration=4.778391413 podStartE2EDuration="40.864140148s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.207031693 +0000 UTC m=+1044.680061534" lastFinishedPulling="2026-01-04 12:06:05.292780428 +0000 UTC m=+1080.765810269" observedRunningTime="2026-01-04 12:06:05.862126335 +0000 UTC m=+1081.335156176" watchObservedRunningTime="2026-01-04 12:06:05.864140148 +0000 UTC m=+1081.337169989" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.865458 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" event={"ID":"6d6813e0-7085-4a6e-af8a-7acb60007841","Type":"ContainerStarted","Data":"272d8e7caad15f249c3d9887de7e3089d65089d84ea2ffd426a0890c818119db"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.865845 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.870693 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" event={"ID":"549e2841-20b6-4018-85f2-0bc091560658","Type":"ContainerStarted","Data":"71380c977377b84821594229e5c416d2e70b39a6b2a10624f0fb661ff8e92d95"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.870896 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.883941 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" podStartSLOduration=5.555747988 podStartE2EDuration="40.883923688s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.534888802 +0000 UTC m=+1045.007918643" lastFinishedPulling="2026-01-04 12:06:04.863064502 +0000 UTC m=+1080.336094343" observedRunningTime="2026-01-04 12:06:05.879339687 +0000 UTC m=+1081.352369528" watchObservedRunningTime="2026-01-04 12:06:05.883923688 +0000 UTC m=+1081.356953529" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 
12:06:05.890928 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" event={"ID":"682c6705-26e9-4d83-aaa8-48fa906104dc","Type":"ContainerStarted","Data":"e3ba66cfdf77cdfb7efe5bce4bda4ce1ac7320b79564394af2c6cf66118efb08"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.898780 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" podStartSLOduration=5.271584268 podStartE2EDuration="40.898761718s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.316666895 +0000 UTC m=+1044.789696736" lastFinishedPulling="2026-01-04 12:06:04.943844345 +0000 UTC m=+1080.416874186" observedRunningTime="2026-01-04 12:06:05.895364169 +0000 UTC m=+1081.368394010" watchObservedRunningTime="2026-01-04 12:06:05.898761718 +0000 UTC m=+1081.371791559" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.960041 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-427tw" event={"ID":"218ad7b5-e7ca-4f1a-b863-1f160424b195","Type":"ContainerStarted","Data":"7c4fd17b7113a6837f037638c4869111c318dae48b413533e7082ae9d4baf7a2"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.960216 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-427tw" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.976131 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" event={"ID":"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14","Type":"ContainerStarted","Data":"fa393c348fabb7e2429fb403cd933230881cc18b267f02424487556113e5ab64"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.976197 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" event={"ID":"a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14","Type":"ContainerStarted","Data":"55cfeb580cb1724901e948a20c38a7fbdae07a8e1188317fb0c467a562b0cfc0"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.977211 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.985767 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" event={"ID":"e9ebf167-b161-4984-9c2b-caeee988e697","Type":"ContainerStarted","Data":"ca607808e3cb857146c76c455fcdf172dcf6254410bc57a0a9d39a433dd0cc89"} Jan 04 12:06:05 crc kubenswrapper[5003]: I0104 12:06:05.986314 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.052065 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt" event={"ID":"957cb0f0-d8f1-43a9-8d05-0b7db926c066","Type":"ContainerStarted","Data":"bd360d6639d0cd51411e12fce0dbf29605dbfb0829dfaf93c116445bb66129e0"} Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.052157 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.056265 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" event={"ID":"c04604e1-db01-4451-8a27-e439e8f5a94c","Type":"ContainerStarted","Data":"d14b249b31951119248fb8254dd3052a48dcbb8d069ba0bda279bdbf02243fee"} Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.063267 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" event={"ID":"c317719a-c16c-4221-9fa4-029bc0d7a004","Type":"ContainerStarted","Data":"5c7b23dd8dccdb34a2e684b0e61ec495dfffdd0cf9cbb9c0ef3cb2a2175b2e2c"} Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.063797 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.067814 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" event={"ID":"ef8d1d25-5343-4471-80aa-df8739a0f5d6","Type":"ContainerStarted","Data":"1ca12e35894ac271bb7efa908034b715aa2237ca2e541773e5a2910b94a00346"} Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.068322 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.068832 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" podStartSLOduration=4.464825376 podStartE2EDuration="40.068817347s" podCreationTimestamp="2026-01-04 12:05:26 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.267605585 +0000 UTC m=+1044.740635426" lastFinishedPulling="2026-01-04 12:06:04.871597556 +0000 UTC m=+1080.344627397" observedRunningTime="2026-01-04 12:06:05.979306234 +0000 UTC m=+1081.452336075" watchObservedRunningTime="2026-01-04 12:06:06.068817347 +0000 UTC m=+1081.541847188" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.069196 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" podStartSLOduration=4.506859012 podStartE2EDuration="40.069190347s" podCreationTimestamp="2026-01-04 12:05:26 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.310157974 +0000 UTC m=+1044.783187815" lastFinishedPulling="2026-01-04 12:06:04.872489309 +0000 UTC m=+1080.345519150" observedRunningTime="2026-01-04 12:06:06.066873186 +0000 UTC m=+1081.539903027" watchObservedRunningTime="2026-01-04 12:06:06.069190347 +0000 UTC m=+1081.542220188" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.109406 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r" podStartSLOduration=13.525411908 podStartE2EDuration="41.109389214s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:28.693574984 +0000 UTC m=+1044.166604825" lastFinishedPulling="2026-01-04 12:05:56.27755228 +0000 UTC m=+1071.750582131" observedRunningTime="2026-01-04 12:06:06.106347894 +0000 UTC m=+1081.579377735" watchObservedRunningTime="2026-01-04 12:06:06.109389214 +0000 UTC m=+1081.582419055" Jan 04 12:06:06 crc 
kubenswrapper[5003]: I0104 12:06:06.128754 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" podStartSLOduration=4.715391383 podStartE2EDuration="40.128730922s" podCreationTimestamp="2026-01-04 12:05:26 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.512836692 +0000 UTC m=+1044.985866533" lastFinishedPulling="2026-01-04 12:06:04.926176231 +0000 UTC m=+1080.399206072" observedRunningTime="2026-01-04 12:06:06.123014982 +0000 UTC m=+1081.596044833" watchObservedRunningTime="2026-01-04 12:06:06.128730922 +0000 UTC m=+1081.601760763" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.192508 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" podStartSLOduration=5.212258666 podStartE2EDuration="41.192490068s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:28.812733427 +0000 UTC m=+1044.285763268" lastFinishedPulling="2026-01-04 12:06:04.792964789 +0000 UTC m=+1080.265994670" observedRunningTime="2026-01-04 12:06:06.180635957 +0000 UTC m=+1081.653665818" watchObservedRunningTime="2026-01-04 12:06:06.192490068 +0000 UTC m=+1081.665519909" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.326848 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw" podStartSLOduration=14.492142662 podStartE2EDuration="41.32683255s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:28.841242856 +0000 UTC m=+1044.314272697" lastFinishedPulling="2026-01-04 12:05:55.675932754 +0000 UTC m=+1071.148962585" observedRunningTime="2026-01-04 12:06:06.32419222 +0000 UTC m=+1081.797222061" watchObservedRunningTime="2026-01-04 12:06:06.32683255 +0000 UTC m=+1081.799862391" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.843062 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" podStartSLOduration=6.502962697 podStartE2EDuration="41.84303662s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.523308247 +0000 UTC m=+1044.996338088" lastFinishedPulling="2026-01-04 12:06:04.86338217 +0000 UTC m=+1080.336412011" observedRunningTime="2026-01-04 12:06:06.571344868 +0000 UTC m=+1082.044374709" watchObservedRunningTime="2026-01-04 12:06:06.84303662 +0000 UTC m=+1082.316066481" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.970249 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" podStartSLOduration=40.970229604 podStartE2EDuration="40.970229604s" podCreationTimestamp="2026-01-04 12:05:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:06:06.844989241 +0000 UTC m=+1082.318019092" watchObservedRunningTime="2026-01-04 12:06:06.970229604 +0000 UTC m=+1082.443259445" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.987291 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-427tw" podStartSLOduration=15.004819499 podStartE2EDuration="41.987274092s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" 
firstStartedPulling="2026-01-04 12:05:28.693961324 +0000 UTC m=+1044.166991165" lastFinishedPulling="2026-01-04 12:05:55.676415917 +0000 UTC m=+1071.149445758" observedRunningTime="2026-01-04 12:06:06.985455924 +0000 UTC m=+1082.458485765" watchObservedRunningTime="2026-01-04 12:06:06.987274092 +0000 UTC m=+1082.460303933" Jan 04 12:06:06 crc kubenswrapper[5003]: I0104 12:06:06.994838 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" podStartSLOduration=5.664208016 podStartE2EDuration="40.994780759s" podCreationTimestamp="2026-01-04 12:05:26 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.531941434 +0000 UTC m=+1045.004971275" lastFinishedPulling="2026-01-04 12:06:04.862514177 +0000 UTC m=+1080.335544018" observedRunningTime="2026-01-04 12:06:06.962766537 +0000 UTC m=+1082.435796398" watchObservedRunningTime="2026-01-04 12:06:06.994780759 +0000 UTC m=+1082.467810600" Jan 04 12:06:07 crc kubenswrapper[5003]: I0104 12:06:07.083358 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" event={"ID":"03fd52b2-dba9-4298-b44b-3ef8c22a4237","Type":"ContainerStarted","Data":"7cbbfb9011ab63a069bc7604ac8b2aa8599a64823c4872d28515819cde03e95c"} Jan 04 12:06:07 crc kubenswrapper[5003]: I0104 12:06:07.085821 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" podStartSLOduration=6.742588547 podStartE2EDuration="42.085805842s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.517999348 +0000 UTC m=+1044.991029189" lastFinishedPulling="2026-01-04 12:06:04.861216643 +0000 UTC m=+1080.334246484" observedRunningTime="2026-01-04 12:06:07.073265132 +0000 UTC m=+1082.546294973" watchObservedRunningTime="2026-01-04 12:06:07.085805842 +0000 UTC m=+1082.558835683" Jan 04 12:06:07 crc kubenswrapper[5003]: I0104 12:06:07.156703 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt" podStartSLOduration=15.715248566 podStartE2EDuration="42.156682835s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.235224734 +0000 UTC m=+1044.708254575" lastFinishedPulling="2026-01-04 12:05:55.676659003 +0000 UTC m=+1071.149688844" observedRunningTime="2026-01-04 12:06:07.156142541 +0000 UTC m=+1082.629172392" watchObservedRunningTime="2026-01-04 12:06:07.156682835 +0000 UTC m=+1082.629712686" Jan 04 12:06:07 crc kubenswrapper[5003]: I0104 12:06:07.259810 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" podStartSLOduration=4.305839939 podStartE2EDuration="41.259793596s" podCreationTimestamp="2026-01-04 12:05:26 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.517988477 +0000 UTC m=+1044.991018318" lastFinishedPulling="2026-01-04 12:06:06.471942134 +0000 UTC m=+1081.944971975" observedRunningTime="2026-01-04 12:06:07.257748652 +0000 UTC m=+1082.730778493" watchObservedRunningTime="2026-01-04 12:06:07.259793596 +0000 UTC m=+1082.732823437" Jan 04 12:06:11 crc kubenswrapper[5003]: E0104 12:06:11.808821 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" podUID="25453bdc-8892-48d4-aca5-cb9549e9e59d" Jan 04 12:06:12 crc kubenswrapper[5003]: I0104 12:06:12.509587 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7df7568dd6-flxhv" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.123611 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-7b549fc966-427tw" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.148414 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-658dd65b86-fd9hs" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.190724 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-7f5ddd8d7b-nbbqt" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.227153 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-f99f54bc8-j5hvw" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.301747 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-78979fc445-lng2r" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.316495 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-598945d5b8-869w7" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.322212 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-f6f74d6db-89lgv" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.322985 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-66f8b87655-5r27f" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.464115 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-7b88bfc995-c45pv" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.469863 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-7cd87b778f-n2549" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.654297 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-bf6d4f946-m2pkz" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.774988 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bb586bbf4-jb94x" Jan 04 12:06:16 crc kubenswrapper[5003]: E0104 12:06:16.809714 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:1b684c4ca525a279deee45980140d895e264526c5c7e0a6981d6fae6cbcaa420\\\"\"" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" podUID="033a9e41-3e14-4a99-9e2c-9ad9151b8cea" Jan 04 12:06:16 crc 
kubenswrapper[5003]: I0104 12:06:16.832674 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-68d988df55-vkj55" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.850859 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-6c866cfdcb-d4nbt" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.966093 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" Jan 04 12:06:16 crc kubenswrapper[5003]: I0104 12:06:16.968741 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-9dbdf6486-4trlm" Jan 04 12:06:19 crc kubenswrapper[5003]: E0104 12:06:19.893735 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:0144c53f5c318a2a2a690f358f5574fd4c1bd580e75e738cea935f8df95e52a9" Jan 04 12:06:19 crc kubenswrapper[5003]: E0104 12:06:19.894385 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:0144c53f5c318a2a2a690f358f5574fd4c1bd580e75e738cea935f8df95e52a9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k6n4z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-6d99759cf-mwxxf_openstack-operators(c04604e1-db01-4451-8a27-e439e8f5a94c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:06:19 crc kubenswrapper[5003]: E0104 12:06:19.895596 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" podUID="c04604e1-db01-4451-8a27-e439e8f5a94c" Jan 04 12:06:20 crc kubenswrapper[5003]: E0104 12:06:20.317410 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:0144c53f5c318a2a2a690f358f5574fd4c1bd580e75e738cea935f8df95e52a9\\\"\"" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" podUID="c04604e1-db01-4451-8a27-e439e8f5a94c" Jan 04 12:06:21 crc kubenswrapper[5003]: E0104 12:06:21.727611 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:5d09c9ffa6ee479724f6da786cb35902b87578365dac2035c222f5e4f752d208" Jan 04 12:06:21 crc kubenswrapper[5003]: E0104 12:06:21.729003 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:5d09c9ffa6ee479724f6da786cb35902b87578365dac2035c222f5e4f752d208,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent@sha256:2737c1e0f9a351672ecbbcaac767cbd282d76a227fdfde541196ae0db13ea99d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner@sha256:7a60fcdb83cafec84d729232e83f3846573a0d089daa1714b176be219ce60847,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api@sha256:36946a77001110f391fb254ec77129803a6b7c34dacfa1a4c8c51aa8d23d57c5,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator@sha256:dd58b29b5d88662a621c685c2b76fe8a71cc9e82aa85dff22a66182a6ceef3ae,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener@sha256:fc47ed1c6249c9f6ef13ef1eac82d5a34819a715dea5117d33df0d0dc69ace8b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier@sha256:e21d35c272d016f4dbd323dc827ee83538c96674adfb188e362aa652ce167b61,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24@sha256:4e83e90dc25d3088a0c66b07db2fff3fac4d6c879d62a5a90149edc341e58e2d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener@sha256:c2ace235f775334be02d78928802b76309543e869cc6b4b55843ee546691e6c3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker@sha256:be77cc58b87f299b42bb2cbe74f3f8d028b8c887851a53209441b60e1363aeb5,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:5a548c25fe3d02f7a042cb0a6d28fc8039a34c4a3b3d07aadda4aba3a926e777,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute@sha256:41dc9cf27a902d9c7b392d730bd761cf3c391a548a841e9e4d38e1571f3c53bf,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi@sha256:174f8f712eb5fdda5061a1a68624befb27bbe766842653788583ec74c5ae506a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter@sha256:7211a617ec657701ca819aa0ba28e1d5750f5bf2c1391b755cc4a48cc360b0fa,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification@sha256:df14f6de785b8aefc38ceb5b47088405224cfa914977c9ab811514cc77b08a67,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s
-operators/sg-core@sha256:09b5017c95d7697e66b9c64846bc48ef5826a009cba89b956ec54561e5f4a2d1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup@sha256:b8d76f96b6f17a3318d089c0b5c0e6c292d969ab392cdcc708ec0f0188c953ae,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler@sha256:43c55407c7c9b4141482533546e6570535373f7e36df374dfbbe388293c19dbf,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume@sha256:097816f289af117f14cd8ee1678a9635e8da6de4a1bde834d02199c4ef65c5c0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_API_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api@sha256:afd4d02331b4ec29bcaa9d0ae009b8e1afbe0ae9ac8c71c02710440acbd0318a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_PROC_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-processor@sha256:9c8b22836e860dc930be7efbb58f29a0a58674597b3ec6572094064bade38726,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api@sha256:281668af8ed34c2464f3593d350cf7b695b41b81f40cc539ad74b7b65822afb9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9@sha256:84319e5dd6569ea531e64b688557c2a2e20deb5225f3d349e402e34858f00fe7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central@sha256:acb53e0e210562091843c212bc0cf5541daacd6f2bd18923430bae8c36578731,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns@sha256:be6f4002842ebadf30d035721567a7e669f12a6eef8c00dc89030b3b08f3dd2c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer@sha256:988635be61f6ed8c0d707622193b7efe8e9b1dc7effbf9b09d2db5ec593b59e7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound@sha256:63e08752678a68571e1c54ceea42c113af493a04cdc22198a3713df7b53f87e5,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker@sha256:6741d06b0f1bbeb2968807dc5be45853cdd3dfb9cc7ea6ef23e909ae24f3cbf4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr@sha256:1803a36d1a397a5595dddb4a2f791ab9443d3af97391a53928fa495ca7032d93,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid@sha256:d163fcf801d67d9c67b2ae4368675b75714db7c531de842aad43979a888c5d57,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler@sha256:581b65b646301e0fcb07582150ba63438f1353a85bf9acf1eb2acb4ce71c58bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL
_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron@sha256:15bf81d933a44128cb6f3264632a9563337eb3bfe82c4a33c746595467d3b0c3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd@sha256:df38dbd6b3eccec2abaa8e3618a385405ccec1b73ae8c3573a138b0c961ed31f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent@sha256:3a08e21338f651a90ee83ae46242b8c80c64488144f27a77848517049c3a8f5d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn@sha256:85729a662800e6b42ceb088545fed39a2ac58704b4a37fd540cdef3ebf9e59a2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent@sha256:ebeb4443ab9f9360925f7abd9c24b7a453390d678f79ed247d2042dcc6f9c3fc,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent@sha256:04bb4cd601b08034c6cba18e701fcd36026ec4340402ed710a0bbd09d8e4884d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter@sha256:39c642b2b337e38c18e80266fb14383754178202f40103646337722a594d984c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent@sha256:27b80783b7d4658d89dda9a09924e9ee472908a8fa1c86bcf3f773d17a4196e0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter@sha256:d339ba049bbd1adccb795962bf163f5b22fd84dea865d88b9eb525e46247d6bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api@sha256:8cb133c5a5551e1aa11ef3326149db1babbf00924d0ff493ebe3346b69fd4b5b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn@sha256:13c3567176bb2d033f6c6b30e20404bd67a217e2537210bf222f3afe0c8619b7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine@sha256:60ac3446d57f1a97a6ca2d8e6584b00aa18704bc2707a7ac1a6a28c6d685d215,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon@sha256:dd7600bc5278c663cfcfecafd3fb051a2cd2ddc3c1efb07738bf09512aa23ae7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached@sha256:e47191ba776414b781b3e27b856ab45a03b9480c7dc2b1addb939608794882dc,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis@sha256:7e7788d1aae251e60f4012870140c65bce9760cd27feaeec5f65c42fe4ffce77,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api@sha256:6a401117007514660c694248adce8136d83559caf1b38e475935335e09ac954a,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE
_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor@sha256:364d50f873551805782c23264570eff40e3807f35d9bccdd456515b4e31da488,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector@sha256:2d72dd490576e0cb670d21a08420888f3758d64ed0cbd2ef8b9aa8488ad2ce40,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent@sha256:96fdf7cddf31509ee63950a9d61320d0b01beb1212e28f37a6e872d6589ded22,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe@sha256:8b7534a2999075f919fc162d21f76026e8bf781913cc3d2ac07e484e9b2fc596,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent@sha256:d65eaaea2ab02d63af9d8a106619908fa01a2e56bd6753edc5590e66e46270db,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone@sha256:d042d7f91bafb002affff8cf750d694a0da129377255c502028528fe2280e790,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api@sha256:a8faef9ea5e8ef8327b7fbb9b9cafc74c38c09c7e3b2365a7cad5eb49766f71d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler@sha256:88aa46ea03a5584560806aa4b093584fda6b2f54c562005b72be2e3615688090,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share@sha256:c08ecdfb7638c1897004347d835bdbabacff40a345f64c2b3111c377096bfa56,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils@sha256:8b4025a4f30e83acc0b51ac063eea701006a302a1acbdec53f54b540270887f7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api@sha256:4992f5ddbd20cca07e750846b2dbe7c51c5766c3002c388f8d8a158e347ec63d,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute@sha256:526afed30c44ef41d54d63a4f4db122bc603f775243ae350a59d2e0b5050076b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor@sha256:22f097cb86b28ac48dc670ed7e0e841280bef1608f11b2b4536fbc2d2a6a90be,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy@sha256:20b3ad38accb9eb8849599280a263d3436a5af03d89645e5ec4508586297ffde,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:q
uay.io/podified-antelope-centos9/openstack-nova-scheduler@sha256:378ed518b68ea809cffa2ff7a93d51e52cfc53af14eedc978924fdabccef0325,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-api@sha256:8c3632033f8c004f31a1c7c57c5ca7b450a11e9170a220b8943b57f80717c70c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager@sha256:3f746f7c6a8c48c0f4a800dcb4bc49bfbc4de4a9ca6a55d8f22bc515a92ea1d9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping@sha256:e1f7bf105190c3cbbfcf0aeeb77a92d1466100ba8377221ed5eee228949e05bd,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog@sha256:954b4c60705b229a968aba3b5b35ab02759378706103ed1189fae3e3316fac35,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker@sha256:f2e0025727efb95efa65e6af6338ae3fc79bf61095d6d54931a0be8d7fe9acac,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient@sha256:2b4f8494513a3af102066fec5868ab167ac8664aceb2f0c639d7a0b60260a944,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather@sha256:854a802357b4f565a366fce3bf29b20c1b768ec4ab7e822ef52dfc2fef000d2c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter@sha256:ecd56e6733c475f2d441344fd98f288c3eac0261ba113695fec7520a954ccbc7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi@sha256:194121c2d79401bd41f75428a437fe32a5806a6a160f7d80798ff66baed9afa5,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller@sha256:fa24ce4aa285e3632c86a53e8d0385d4c788d049da42dd06570ad9d44aae00de,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base@sha256:df45459c449f64cc6471e98c0890ac00dcc77a940f85d4e7e9d9dd52990d65b3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server@sha256:947c1bb9373b7d3f2acea104a5666e394c830111bf80d133f1fe7238e4d06f28,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd@sha256:425ebddc9d6851ee9c730e67eaf43039943dc7937fb11332a41335a9114b2d44,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server@sha256:bea03c7c34dc6ef8bc163e12a8940011b8feebc44a2efaaba2d3c4c6c515d6c8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api@sha256:33f4e5f7a715d48482ec46a42267ea992fa268585303c4f1bd3cbea072a6348b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d,ValueFrom:nil,},EnvVar{Name:RELATED
_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account@sha256:a2280bc80b454dc9e5c95daf74b8a53d6f9e42fc16d45287e089fc41014fe1da,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container@sha256:88d687a7bb593b2e61598b422baba84d67c114419590a6d83d15327d119ce208,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object@sha256:2635e02b99d380b2e547013c09c6c8da01bc89b3d3ce570e4d8f8656c7635b0e,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server@sha256:ac7fefe1c93839c7ccb2aaa0a18751df0e9f64a36a3b4cc1b81d82d7774b8b45,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all@sha256:a357cf166caaeea230f8a912aceb042e3170c5d680844e8f97b936baa10834ed,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api@sha256:79c9efc6a45fa22aeaff8485be7103b90ddb87c9142e851405e25df6655487e2,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier@sha256:3cf9b4c9342d559b2c1ba8124e5c06fb01c7ce2706bab6bd8adbdec983ecc9ce,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine@sha256:bf0297d2832b9bbe3a8eb5b8ff517b3d2a7ce6ba68f224e743d9943f55f727e2,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8crpb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed 
in pod openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9_openstack-operators(682c6705-26e9-4d83-aaa8-48fa906104dc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:06:21 crc kubenswrapper[5003]: E0104 12:06:21.731862 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" podUID="682c6705-26e9-4d83-aaa8-48fa906104dc"
Jan 04 12:06:22 crc kubenswrapper[5003]: I0104 12:06:22.330774 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29" event={"ID":"157189ac-3baf-49a7-b37b-dacddf4f43af","Type":"ContainerStarted","Data":"254a56cb638c222e78156c49c961d85ba249e8901f4b403989a526052a29e335"}
Jan 04 12:06:22 crc kubenswrapper[5003]: I0104 12:06:22.331597 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29"
Jan 04 12:06:22 crc kubenswrapper[5003]: I0104 12:06:22.332377 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf" event={"ID":"e798bc76-3f22-4bf8-b337-bf3bf03ca3b2","Type":"ContainerStarted","Data":"d363d4c13b1985b518f71aef86c9e8040d1040b1ae2ec298c1e12451c10f804e"}
Jan 04 12:06:22 crc kubenswrapper[5003]: I0104 12:06:22.344175 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8" event={"ID":"bc716961-01bb-4e23-a58e-f44f81d91bee","Type":"ContainerStarted","Data":"53ff07229b6aac27f429cc001b224e3645c3c56025d65799f753b0500d4fc987"}
Jan 04 12:06:22 crc kubenswrapper[5003]: I0104 12:06:22.345793 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8"
Jan 04 12:06:22 crc kubenswrapper[5003]: E0104 12:06:22.347071 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:5d09c9ffa6ee479724f6da786cb35902b87578365dac2035c222f5e4f752d208\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" podUID="682c6705-26e9-4d83-aaa8-48fa906104dc"
Jan 04 12:06:22 crc kubenswrapper[5003]: I0104 12:06:22.405810 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29" podStartSLOduration=4.516156459 podStartE2EDuration="57.405784413s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:28.81627906 +0000 UTC m=+1044.289308911" lastFinishedPulling="2026-01-04 12:06:21.705906984 +0000 UTC m=+1097.178936865" observedRunningTime="2026-01-04 12:06:22.363343478 +0000 UTC m=+1097.836373319" watchObservedRunningTime="2026-01-04 12:06:22.405784413 +0000 UTC m=+1097.878814254"
Jan 04 12:06:22 crc kubenswrapper[5003]: I0104 12:06:22.426564 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8" podStartSLOduration=5.04148241 podStartE2EDuration="57.426533719s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.322295033 +0000 UTC m=+1044.795324874" lastFinishedPulling="2026-01-04 12:06:21.707346292 +0000 UTC m=+1097.180376183" observedRunningTime="2026-01-04 12:06:22.419149075 +0000 UTC m=+1097.892178926" watchObservedRunningTime="2026-01-04 12:06:22.426533719 +0000 UTC m=+1097.899563600"
Jan 04 12:06:22 crc kubenswrapper[5003]: I0104 12:06:22.445704 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-6jlbf" podStartSLOduration=3.860607755 podStartE2EDuration="56.445679032s" podCreationTimestamp="2026-01-04 12:05:26 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.32255882 +0000 UTC m=+1044.795588661" lastFinishedPulling="2026-01-04 12:06:21.907630057 +0000 UTC m=+1097.380659938" observedRunningTime="2026-01-04 12:06:22.443212897 +0000 UTC m=+1097.916242738" watchObservedRunningTime="2026-01-04 12:06:22.445679032 +0000 UTC m=+1097.918708913"
Jan 04 12:06:25 crc kubenswrapper[5003]: I0104 12:06:25.392172 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" event={"ID":"25453bdc-8892-48d4-aca5-cb9549e9e59d","Type":"ContainerStarted","Data":"ce79cf1551689adb0b8ab54d11e5685f617bc7cb29f209c3df51e45a04423cdc"}
Jan 04 12:06:25 crc kubenswrapper[5003]: I0104 12:06:25.392767 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj"
Jan 04 12:06:25 crc kubenswrapper[5003]: I0104 12:06:25.443080 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj" podStartSLOduration=5.689740372 podStartE2EDuration="1m0.443052459s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.54205436 +0000 UTC m=+1045.015084201" lastFinishedPulling="2026-01-04 12:06:24.295366447 +0000 UTC m=+1099.768396288" observedRunningTime="2026-01-04 12:06:25.439450664 +0000 UTC m=+1100.912480515" watchObservedRunningTime="2026-01-04 12:06:25.443052459 +0000 UTC m=+1100.916082310"
Jan 04 12:06:32 crc kubenswrapper[5003]: I0104 12:06:32.548950 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" event={"ID":"033a9e41-3e14-4a99-9e2c-9ad9151b8cea","Type":"ContainerStarted","Data":"e529ea43f29c5e2e7fb81d00b2ca151b1b88776cf2923aa72c620958e77b200f"}
Jan 04 12:06:32 crc kubenswrapper[5003]: I0104 12:06:32.551488 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5"
Jan 04 12:06:32 crc kubenswrapper[5003]: I0104 12:06:32.572965 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5" podStartSLOduration=4.571198199 podStartE2EDuration="1m6.572942623s" podCreationTimestamp="2026-01-04 12:05:26 +0000 UTC" firstStartedPulling="2026-01-04 12:05:29.526120721 +0000 UTC m=+1044.999150562" lastFinishedPulling="2026-01-04 12:06:31.527865145 +0000 UTC m=+1107.000894986" observedRunningTime="2026-01-04 12:06:32.569277548 +0000 UTC m=+1108.042307459" watchObservedRunningTime="2026-01-04 12:06:32.572942623 +0000 UTC m=+1108.045972494"
Jan 04 12:06:33 crc kubenswrapper[5003]: I0104 12:06:33.558211 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" event={"ID":"c04604e1-db01-4451-8a27-e439e8f5a94c","Type":"ContainerStarted","Data":"28da08ca23bf8e2938503d83189ce631bc43175197b4fda984d1c7940eb540d2"}
Jan 04 12:06:33 crc kubenswrapper[5003]: I0104 12:06:33.558842 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf"
Jan 04 12:06:36 crc kubenswrapper[5003]: I0104 12:06:36.267232 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-568985c78-94d29"
Jan 04 12:06:36 crc kubenswrapper[5003]: I0104 12:06:36.300194 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf" podStartSLOduration=43.106976714 podStartE2EDuration="1m11.300165165s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:06:05.168870031 +0000 UTC m=+1080.641899872" lastFinishedPulling="2026-01-04 12:06:33.362058482 +0000 UTC m=+1108.835088323" observedRunningTime="2026-01-04 12:06:33.578677097 +0000 UTC m=+1109.051706948" watchObservedRunningTime="2026-01-04 12:06:36.300165165 +0000 UTC m=+1111.773195046"
Jan 04 12:06:36 crc kubenswrapper[5003]: I0104 12:06:36.510865 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5fbbf8b6cc-h7zz8"
Jan 04 12:06:36 crc kubenswrapper[5003]: I0104 12:06:36.529002 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-68c649d9d-2kxcj"
Jan 04 12:06:36 crc kubenswrapper[5003]: I0104 12:06:36.746441 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-9b6f8f78c-pfsq5"
Jan 04 12:06:38 crc kubenswrapper[5003]: I0104 12:06:38.621285 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" event={"ID":"682c6705-26e9-4d83-aaa8-48fa906104dc","Type":"ContainerStarted","Data":"346da8866c900c9bda32a957b44448e307a2e55751228540c12af7fe91e70845"}
Jan 04 12:06:38 crc kubenswrapper[5003]: I0104 12:06:38.623078 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9"
Jan 04 12:06:38 crc kubenswrapper[5003]: I0104 12:06:38.693649 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9" podStartSLOduration=40.779225159 podStartE2EDuration="1m13.69362653s" podCreationTimestamp="2026-01-04 12:05:25 +0000 UTC" firstStartedPulling="2026-01-04 12:06:05.351930613 +0000 UTC m=+1080.824960454" lastFinishedPulling="2026-01-04 12:06:38.266331984 +0000 UTC m=+1113.739361825" observedRunningTime="2026-01-04 12:06:38.693091686 +0000 UTC m=+1114.166121527" watchObservedRunningTime="2026-01-04 12:06:38.69362653 +0000 UTC m=+1114.166656371"
Jan 04 12:06:39 crc kubenswrapper[5003]: I0104 12:06:39.418891 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:06:39 crc kubenswrapper[5003]: I0104 12:06:39.418986 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:06:41 crc kubenswrapper[5003]: I0104 12:06:41.796613 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-6d99759cf-mwxxf"
Jan 04 12:06:48 crc kubenswrapper[5003]: I0104 12:06:48.404280 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.459764 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"]
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.461882 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.464543 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.465539 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-6gc6d"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.465610 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.465672 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.481898 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"]
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.553992 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-bgtgw"]
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.562365 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-bgtgw"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.567906 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.570543 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-bgtgw"]
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.602844 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86288312-c398-4402-a6dc-1f2a2fd75e0d-config\") pod \"dnsmasq-dns-84bb9d8bd9-ngz7q\" (UID: \"86288312-c398-4402-a6dc-1f2a2fd75e0d\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.602912 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g46jr\" (UniqueName: \"kubernetes.io/projected/86288312-c398-4402-a6dc-1f2a2fd75e0d-kube-api-access-g46jr\") pod \"dnsmasq-dns-84bb9d8bd9-ngz7q\" (UID: \"86288312-c398-4402-a6dc-1f2a2fd75e0d\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.704337 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86288312-c398-4402-a6dc-1f2a2fd75e0d-config\") pod \"dnsmasq-dns-84bb9d8bd9-ngz7q\" (UID: \"86288312-c398-4402-a6dc-1f2a2fd75e0d\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.704400 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g46jr\" (UniqueName: \"kubernetes.io/projected/86288312-c398-4402-a6dc-1f2a2fd75e0d-kube-api-access-g46jr\") pod \"dnsmasq-dns-84bb9d8bd9-ngz7q\" (UID: \"86288312-c398-4402-a6dc-1f2a2fd75e0d\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.704442 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpnrm\" (UniqueName: \"kubernetes.io/projected/3f93d660-567b-4875-91f4-a2609d8a0ab8-kube-api-access-gpnrm\") pod \"dnsmasq-dns-5f854695bc-bgtgw\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " pod="openstack/dnsmasq-dns-5f854695bc-bgtgw"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.704488 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-config\") pod \"dnsmasq-dns-5f854695bc-bgtgw\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " pod="openstack/dnsmasq-dns-5f854695bc-bgtgw"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.704549 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-dns-svc\") pod \"dnsmasq-dns-5f854695bc-bgtgw\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " pod="openstack/dnsmasq-dns-5f854695bc-bgtgw"
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.705383 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86288312-c398-4402-a6dc-1f2a2fd75e0d-config\") pod \"dnsmasq-dns-84bb9d8bd9-ngz7q\" (UID: \"86288312-c398-4402-a6dc-1f2a2fd75e0d\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"
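The manager container of openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9 fails its pull above (ErrImagePull: context canceled) and is then parked in ImagePullBackOff ("Back-off pulling image ..."), the kubelet's capped exponential retry for failed pulls. The Go sketch below illustrates that retry pattern only; the 10s initial delay and 5m cap are assumptions based on commonly quoted kubelet defaults and do not appear anywhere in this log.

package main

import (
	"fmt"
	"time"
)

// Sketch of capped exponential image-pull backoff. initialDelay-style
// constants here are assumptions, not values read from this log.
func main() {
	delay := 10 * time.Second
	maxDelay := 5 * time.Minute
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("pull attempt %d failed; next retry in %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}

On this node the backoff never reaches its cap: the same pod logs ContainerStarted at 12:06:38, roughly 16 seconds after the back-off record.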
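The "Observed pod startup duration" records above carry enough timestamps to recompute what pod_startup_latency_tracker.go reports: podStartE2EDuration equals watchObservedRunningTime minus podCreationTimestamp, and on this node almost all of it is image-pull time. A self-contained recomputation using the keystone-operator values (parseKubeletTime is our hypothetical helper, not a kubelet function; the layout is Go's default time.Time formatting, which these fields use):

package main

import (
	"fmt"
	"strings"
	"time"
)

// parseKubeletTime drops the monotonic-clock suffix (" m=+...") that these
// records carry, then parses the remaining wall-clock portion.
func parseKubeletTime(s string) (time.Time, error) {
	if i := strings.Index(s, " m="); i >= 0 {
		s = s[:i]
	}
	return time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
}

func main() {
	// Values copied from the keystone-operator record above.
	created, _ := parseKubeletTime("2026-01-04 12:05:25 +0000 UTC")
	firstPull, _ := parseKubeletTime("2026-01-04 12:05:28.81627906 +0000 UTC m=+1044.289308911")
	lastPull, _ := parseKubeletTime("2026-01-04 12:06:21.705906984 +0000 UTC m=+1097.178936865")
	running, _ := parseKubeletTime("2026-01-04 12:06:22.405784413 +0000 UTC m=+1097.878814254")

	fmt.Println("e2e startup:", running.Sub(created))    // 57.405784413s, the reported podStartE2EDuration
	fmt.Println("image pulls:", lastPull.Sub(firstPull)) // 52.889627924s of that is pulling
}

Subtracting the pull window from the end-to-end figure gives about 4.516s, within a microsecond of the reported podStartSLOduration=4.516156459, consistent with the SLO duration being startup latency net of image pulls.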
Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.724919 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g46jr\" (UniqueName: \"kubernetes.io/projected/86288312-c398-4402-a6dc-1f2a2fd75e0d-kube-api-access-g46jr\") pod \"dnsmasq-dns-84bb9d8bd9-ngz7q\" (UID: \"86288312-c398-4402-a6dc-1f2a2fd75e0d\") " pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q" Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.806471 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-dns-svc\") pod \"dnsmasq-dns-5f854695bc-bgtgw\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.806565 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpnrm\" (UniqueName: \"kubernetes.io/projected/3f93d660-567b-4875-91f4-a2609d8a0ab8-kube-api-access-gpnrm\") pod \"dnsmasq-dns-5f854695bc-bgtgw\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.806608 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-config\") pod \"dnsmasq-dns-5f854695bc-bgtgw\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.807475 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-config\") pod \"dnsmasq-dns-5f854695bc-bgtgw\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.809148 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.809379 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-6gc6d" Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.816872 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q" Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.817619 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-dns-svc\") pod \"dnsmasq-dns-5f854695bc-bgtgw\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.842571 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpnrm\" (UniqueName: \"kubernetes.io/projected/3f93d660-567b-4875-91f4-a2609d8a0ab8-kube-api-access-gpnrm\") pod \"dnsmasq-dns-5f854695bc-bgtgw\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" Jan 04 12:07:04 crc kubenswrapper[5003]: I0104 12:07:04.887698 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" Jan 04 12:07:05 crc kubenswrapper[5003]: I0104 12:07:05.574260 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-bgtgw"] Jan 04 12:07:05 crc kubenswrapper[5003]: W0104 12:07:05.583221 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f93d660_567b_4875_91f4_a2609d8a0ab8.slice/crio-36321367dccf32f3699b17b62ddbdfcbe59114d706ee113f468d465b2a3ab7b5 WatchSource:0}: Error finding container 36321367dccf32f3699b17b62ddbdfcbe59114d706ee113f468d465b2a3ab7b5: Status 404 returned error can't find the container with id 36321367dccf32f3699b17b62ddbdfcbe59114d706ee113f468d465b2a3ab7b5 Jan 04 12:07:05 crc kubenswrapper[5003]: I0104 12:07:05.630316 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"] Jan 04 12:07:05 crc kubenswrapper[5003]: W0104 12:07:05.633478 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod86288312_c398_4402_a6dc_1f2a2fd75e0d.slice/crio-9769d0f8a19da603f4ee4f4cb7b6db26c3acbde7d2db0c48d543217ab32a86c5 WatchSource:0}: Error finding container 9769d0f8a19da603f4ee4f4cb7b6db26c3acbde7d2db0c48d543217ab32a86c5: Status 404 returned error can't find the container with id 9769d0f8a19da603f4ee4f4cb7b6db26c3acbde7d2db0c48d543217ab32a86c5 Jan 04 12:07:05 crc kubenswrapper[5003]: I0104 12:07:05.894657 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q" event={"ID":"86288312-c398-4402-a6dc-1f2a2fd75e0d","Type":"ContainerStarted","Data":"9769d0f8a19da603f4ee4f4cb7b6db26c3acbde7d2db0c48d543217ab32a86c5"} Jan 04 12:07:05 crc kubenswrapper[5003]: I0104 12:07:05.895351 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" event={"ID":"3f93d660-567b-4875-91f4-a2609d8a0ab8","Type":"ContainerStarted","Data":"36321367dccf32f3699b17b62ddbdfcbe59114d706ee113f468d465b2a3ab7b5"} Jan 04 12:07:07 crc kubenswrapper[5003]: I0104 12:07:07.606274 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-bgtgw"] Jan 04 12:07:07 crc kubenswrapper[5003]: I0104 12:07:07.766502 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-w5hkf"] Jan 04 12:07:07 crc kubenswrapper[5003]: I0104 12:07:07.769720 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:07:07 crc kubenswrapper[5003]: I0104 12:07:07.851924 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-w5hkf"] Jan 04 12:07:07 crc kubenswrapper[5003]: I0104 12:07:07.956815 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-config\") pod \"dnsmasq-dns-744ffd65bc-w5hkf\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:07:07 crc kubenswrapper[5003]: I0104 12:07:07.956880 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g2fr\" (UniqueName: \"kubernetes.io/projected/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-kube-api-access-6g2fr\") pod \"dnsmasq-dns-744ffd65bc-w5hkf\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:07:07 crc kubenswrapper[5003]: I0104 12:07:07.959537 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-w5hkf\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.067243 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-w5hkf\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.067413 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-config\") pod \"dnsmasq-dns-744ffd65bc-w5hkf\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.067440 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g2fr\" (UniqueName: \"kubernetes.io/projected/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-kube-api-access-6g2fr\") pod \"dnsmasq-dns-744ffd65bc-w5hkf\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.068184 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-dns-svc\") pod \"dnsmasq-dns-744ffd65bc-w5hkf\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.068287 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-config\") pod \"dnsmasq-dns-744ffd65bc-w5hkf\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.155768 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"] Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.188181 
5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g2fr\" (UniqueName: \"kubernetes.io/projected/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-kube-api-access-6g2fr\") pod \"dnsmasq-dns-744ffd65bc-w5hkf\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.232652 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4fhgm"] Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.233915 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.245130 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4fhgm"] Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.286785 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-dns-svc\") pod \"dnsmasq-dns-95f5f6995-4fhgm\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.286861 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bdxr\" (UniqueName: \"kubernetes.io/projected/21282961-7d45-4f5f-8e71-b7d369b23110-kube-api-access-6bdxr\") pod \"dnsmasq-dns-95f5f6995-4fhgm\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.286954 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-config\") pod \"dnsmasq-dns-95f5f6995-4fhgm\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.388575 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bdxr\" (UniqueName: \"kubernetes.io/projected/21282961-7d45-4f5f-8e71-b7d369b23110-kube-api-access-6bdxr\") pod \"dnsmasq-dns-95f5f6995-4fhgm\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.388694 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-config\") pod \"dnsmasq-dns-95f5f6995-4fhgm\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.388726 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-dns-svc\") pod \"dnsmasq-dns-95f5f6995-4fhgm\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.389634 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-dns-svc\") pod \"dnsmasq-dns-95f5f6995-4fhgm\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 
04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.390670 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-config\") pod \"dnsmasq-dns-95f5f6995-4fhgm\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.395322 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.410107 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bdxr\" (UniqueName: \"kubernetes.io/projected/21282961-7d45-4f5f-8e71-b7d369b23110-kube-api-access-6bdxr\") pod \"dnsmasq-dns-95f5f6995-4fhgm\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.620754 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.919874 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.928781 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.938644 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.938892 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.939704 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-hp8qx" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.939824 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.939938 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.940087 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.940246 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Jan 04 12:07:08 crc kubenswrapper[5003]: I0104 12:07:08.958460 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.151738 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/81193935-fcd0-4877-9d65-6155c1a888e2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.152193 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/81193935-fcd0-4877-9d65-6155c1a888e2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 
12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.152225 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.152242 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.152316 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.152336 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.152353 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.152369 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6t42\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-kube-api-access-x6t42\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.152426 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.152469 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.152497 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.255776 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.255823 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.255856 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.255882 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/81193935-fcd0-4877-9d65-6155c1a888e2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.255913 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/81193935-fcd0-4877-9d65-6155c1a888e2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.255942 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.255959 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.256001 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.256040 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.256063 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" 
Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.256087 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6t42\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-kube-api-access-x6t42\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.257298 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.259467 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.263475 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.263861 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.265207 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.265564 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.273064 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/81193935-fcd0-4877-9d65-6155c1a888e2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.281737 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/81193935-fcd0-4877-9d65-6155c1a888e2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.289827 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-confd\") pod \"rabbitmq-server-0\" 
(UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.299807 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.306201 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6t42\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-kube-api-access-x6t42\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.319809 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-w5hkf"] Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.347673 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.348862 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.361589 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.361652 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.361588 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.361788 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.361887 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.362583 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.362692 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-fkt8g" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.385935 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.420379 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.420700 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.420557 5003 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.462364 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.462429 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.462460 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.462475 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.462493 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.462519 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/829003dc-aa5e-43a6-a4f5-c578c73e76d4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.462537 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.462557 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.462603 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.462624 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/829003dc-aa5e-43a6-a4f5-c578c73e76d4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.462641 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr8lh\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-kube-api-access-vr8lh\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.564299 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.564347 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.564369 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.564395 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/829003dc-aa5e-43a6-a4f5-c578c73e76d4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.564414 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.564433 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.564485 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.564505 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/829003dc-aa5e-43a6-a4f5-c578c73e76d4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.564521 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr8lh\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-kube-api-access-vr8lh\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.564561 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.564667 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.565047 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.565586 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.566729 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.568440 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.568633 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " 
pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.568947 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.570212 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/829003dc-aa5e-43a6-a4f5-c578c73e76d4-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.570741 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.571011 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.573518 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/829003dc-aa5e-43a6-a4f5-c578c73e76d4-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.577518 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.587586 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr8lh\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-kube-api-access-vr8lh\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.596378 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.703786 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4fhgm"] Jan 04 12:07:09 crc kubenswrapper[5003]: W0104 12:07:09.711545 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21282961_7d45_4f5f_8e71_b7d369b23110.slice/crio-9588bd1b50e0ebb9403c69b0700e74b08fb9ad421af9dee5d7a09eaa4bc31175 WatchSource:0}: Error finding container 9588bd1b50e0ebb9403c69b0700e74b08fb9ad421af9dee5d7a09eaa4bc31175: Status 404 returned error can't find the container with id 9588bd1b50e0ebb9403c69b0700e74b08fb9ad421af9dee5d7a09eaa4bc31175 Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 
12:07:09.720405 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.981263 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" event={"ID":"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637","Type":"ContainerStarted","Data":"5e1ebd688a3fc2693195c838e30757f2197aa65b6ac9170ac71d80e31a225d78"} Jan 04 12:07:09 crc kubenswrapper[5003]: I0104 12:07:09.982664 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" event={"ID":"21282961-7d45-4f5f-8e71-b7d369b23110","Type":"ContainerStarted","Data":"9588bd1b50e0ebb9403c69b0700e74b08fb9ad421af9dee5d7a09eaa4bc31175"} Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.088767 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.090312 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.094529 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-v7gwm" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.095066 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.095108 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.096343 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.106153 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.117105 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.179078 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-generated\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.179518 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kolla-config\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.179538 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-default\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.179557 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-combined-ca-bundle\") pod 
\"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.179599 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rkz6\" (UniqueName: \"kubernetes.io/projected/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kube-api-access-4rkz6\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.179621 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-operator-scripts\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.179647 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.179669 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.281874 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rkz6\" (UniqueName: \"kubernetes.io/projected/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kube-api-access-4rkz6\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.281925 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-operator-scripts\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.281961 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.281986 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.282049 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-generated\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: 
I0104 12:07:10.282073 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-default\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.282093 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kolla-config\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.282111 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.287931 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.289451 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-operator-scripts\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.289627 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.302778 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.303105 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-generated\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.303820 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-default\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.304282 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kolla-config\") pod 
\"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.311696 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rkz6\" (UniqueName: \"kubernetes.io/projected/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kube-api-access-4rkz6\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.319938 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.338138 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.480453 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.718134 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.948608 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.994683 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"81193935-fcd0-4877-9d65-6155c1a888e2","Type":"ContainerStarted","Data":"fcc464258696e0f890aa3c6646f844036dc3b081256709d7f92e48e33af14189"} Jan 04 12:07:10 crc kubenswrapper[5003]: W0104 12:07:10.995692 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30c47e0c_622e_4f66_a71d_f7e6cc0f23d9.slice/crio-820ed0f4b8611cfdb78035e27df4f2a9fff7e8716ead65a412cd9e625a125567 WatchSource:0}: Error finding container 820ed0f4b8611cfdb78035e27df4f2a9fff7e8716ead65a412cd9e625a125567: Status 404 returned error can't find the container with id 820ed0f4b8611cfdb78035e27df4f2a9fff7e8716ead65a412cd9e625a125567 Jan 04 12:07:10 crc kubenswrapper[5003]: I0104 12:07:10.996871 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"829003dc-aa5e-43a6-a4f5-c578c73e76d4","Type":"ContainerStarted","Data":"02b4cbf5722d9c379d7df2308f2f7645728dbe01197a0430e9ddf12ac03a3df6"} Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.595775 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.598749 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.601302 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-rbj96" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.601472 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.601980 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.604394 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.609133 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.718900 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.718950 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npvx4\" (UniqueName: \"kubernetes.io/projected/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kube-api-access-npvx4\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.718971 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.718988 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.719035 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.719064 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.719087 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.719110 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.846901 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.846942 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npvx4\" (UniqueName: \"kubernetes.io/projected/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kube-api-access-npvx4\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.846963 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.846985 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.847030 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.847055 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.847079 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.847097 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.847473 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:11 crc kubenswrapper[5003]: I0104 12:07:11.858037 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:11.995220 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:11.998653 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.027793 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.103319 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npvx4\" (UniqueName: \"kubernetes.io/projected/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kube-api-access-npvx4\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.107740 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.121454 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.144463 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " 
pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.195128 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.196486 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.200246 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.200411 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-knxt9" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.207898 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.244056 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.258346 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.309217 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-kolla-config\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.309305 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-combined-ca-bundle\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.309339 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-config-data\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.309367 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmgvl\" (UniqueName: \"kubernetes.io/projected/274fbbf3-b927-408e-9594-946f6ea71638-kube-api-access-dmgvl\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.309409 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-memcached-tls-certs\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.362546 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9","Type":"ContainerStarted","Data":"820ed0f4b8611cfdb78035e27df4f2a9fff7e8716ead65a412cd9e625a125567"} Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.411136 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-combined-ca-bundle\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.411225 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-config-data\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.411271 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmgvl\" (UniqueName: \"kubernetes.io/projected/274fbbf3-b927-408e-9594-946f6ea71638-kube-api-access-dmgvl\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.411317 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-memcached-tls-certs\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.411374 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-kolla-config\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.412856 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-kolla-config\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.414114 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-config-data\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.416776 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-combined-ca-bundle\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.417168 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-memcached-tls-certs\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.437578 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmgvl\" (UniqueName: \"kubernetes.io/projected/274fbbf3-b927-408e-9594-946f6ea71638-kube-api-access-dmgvl\") pod \"memcached-0\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " pod="openstack/memcached-0" Jan 04 12:07:12 crc kubenswrapper[5003]: I0104 12:07:12.584550 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Jan 04 12:07:13 crc kubenswrapper[5003]: I0104 12:07:13.348833 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 12:07:13 crc kubenswrapper[5003]: I0104 12:07:13.382916 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 04 12:07:13 crc kubenswrapper[5003]: W0104 12:07:13.405627 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9f73829_d0a1_4e4d_8f5a_755d63ce1caa.slice/crio-3b572644d222a01c860c65908d6859ffdf8d098dfbaafcc99069c8317edcd524 WatchSource:0}: Error finding container 3b572644d222a01c860c65908d6859ffdf8d098dfbaafcc99069c8317edcd524: Status 404 returned error can't find the container with id 3b572644d222a01c860c65908d6859ffdf8d098dfbaafcc99069c8317edcd524 Jan 04 12:07:13 crc kubenswrapper[5003]: W0104 12:07:13.445338 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod274fbbf3_b927_408e_9594_946f6ea71638.slice/crio-dae14417523a45eccdb53cb7dc7ce6b65dc4fc384cb328271d9f37fc0ac3aefd WatchSource:0}: Error finding container dae14417523a45eccdb53cb7dc7ce6b65dc4fc384cb328271d9f37fc0ac3aefd: Status 404 returned error can't find the container with id dae14417523a45eccdb53cb7dc7ce6b65dc4fc384cb328271d9f37fc0ac3aefd Jan 04 12:07:13 crc kubenswrapper[5003]: I0104 12:07:13.787488 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:07:13 crc kubenswrapper[5003]: I0104 12:07:13.788847 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:07:13 crc kubenswrapper[5003]: I0104 12:07:13.788927 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 04 12:07:13 crc kubenswrapper[5003]: I0104 12:07:13.816579 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4kj4\" (UniqueName: \"kubernetes.io/projected/df21c365-a2e2-4a85-8de8-f132fd605981-kube-api-access-j4kj4\") pod \"kube-state-metrics-0\" (UID: \"df21c365-a2e2-4a85-8de8-f132fd605981\") " pod="openstack/kube-state-metrics-0" Jan 04 12:07:13 crc kubenswrapper[5003]: I0104 12:07:13.828835 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-7sq8v" Jan 04 12:07:13 crc kubenswrapper[5003]: I0104 12:07:13.918840 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4kj4\" (UniqueName: \"kubernetes.io/projected/df21c365-a2e2-4a85-8de8-f132fd605981-kube-api-access-j4kj4\") pod \"kube-state-metrics-0\" (UID: \"df21c365-a2e2-4a85-8de8-f132fd605981\") " pod="openstack/kube-state-metrics-0" Jan 04 12:07:13 crc kubenswrapper[5003]: I0104 12:07:13.942771 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4kj4\" (UniqueName: \"kubernetes.io/projected/df21c365-a2e2-4a85-8de8-f132fd605981-kube-api-access-j4kj4\") pod \"kube-state-metrics-0\" (UID: \"df21c365-a2e2-4a85-8de8-f132fd605981\") " pod="openstack/kube-state-metrics-0" Jan 04 12:07:14 crc kubenswrapper[5003]: I0104 12:07:14.129804 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 04 12:07:14 crc kubenswrapper[5003]: I0104 12:07:14.486441 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"274fbbf3-b927-408e-9594-946f6ea71638","Type":"ContainerStarted","Data":"dae14417523a45eccdb53cb7dc7ce6b65dc4fc384cb328271d9f37fc0ac3aefd"} Jan 04 12:07:14 crc kubenswrapper[5003]: I0104 12:07:14.490287 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa","Type":"ContainerStarted","Data":"3b572644d222a01c860c65908d6859ffdf8d098dfbaafcc99069c8317edcd524"} Jan 04 12:07:14 crc kubenswrapper[5003]: I0104 12:07:14.936673 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:07:15 crc kubenswrapper[5003]: I0104 12:07:15.524038 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"df21c365-a2e2-4a85-8de8-f132fd605981","Type":"ContainerStarted","Data":"e8c4df52246c759f8a562a0e4cbeb7f7b5c2b3b130f46fe0791f7ef9e62ac095"} Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.010324 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-fdswd"] Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.012062 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.017359 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-xc4md" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.018924 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fdswd"] Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.023379 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.023671 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.103321 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-scripts\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.103381 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-log-ovn\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.103401 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-combined-ca-bundle\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.103624 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwps5\" (UniqueName: 
\"kubernetes.io/projected/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-kube-api-access-nwps5\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.103653 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run-ovn\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.103680 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.103719 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-ovn-controller-tls-certs\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.132913 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-99mjg"] Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.136385 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.204900 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-scripts\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.204988 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-log-ovn\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.205008 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-combined-ca-bundle\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.205059 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwps5\" (UniqueName: \"kubernetes.io/projected/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-kube-api-access-nwps5\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.205082 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-log\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " 
pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.205111 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run-ovn\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.205125 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-scripts\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.205148 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.205187 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-ovn-controller-tls-certs\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.205213 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-run\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.205230 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbjbn\" (UniqueName: \"kubernetes.io/projected/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-kube-api-access-fbjbn\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.205261 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-etc-ovs\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.205278 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-lib\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.206982 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-log-ovn\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.207274 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.207341 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-scripts\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.208761 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run-ovn\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.236662 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-ovn-controller-tls-certs\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.245832 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-combined-ca-bundle\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.272127 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwps5\" (UniqueName: \"kubernetes.io/projected/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-kube-api-access-nwps5\") pod \"ovn-controller-fdswd\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.274329 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-99mjg"] Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.306337 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-scripts\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.306421 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-run\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.306442 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbjbn\" (UniqueName: \"kubernetes.io/projected/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-kube-api-access-fbjbn\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.306471 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-etc-ovs\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.306489 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-lib\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.306540 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-log\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.306720 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-log\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.308512 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-scripts\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.308572 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-run\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.308929 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-etc-ovs\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.309052 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-lib\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.350331 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbjbn\" (UniqueName: \"kubernetes.io/projected/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-kube-api-access-fbjbn\") pod \"ovn-controller-ovs-99mjg\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.438859 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fdswd" Jan 04 12:07:17 crc kubenswrapper[5003]: I0104 12:07:17.483871 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.539481 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.541775 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.549149 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.581396 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.581757 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.583314 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.584822 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.586177 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-xvhnx" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.653304 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.653373 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.653423 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-config\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.653476 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.653516 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c5c96797-3987-489a-8ce2-510caa11262c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.653540 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.653591 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.653637 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b67jl\" (UniqueName: \"kubernetes.io/projected/c5c96797-3987-489a-8ce2-510caa11262c-kube-api-access-b67jl\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.773734 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b67jl\" (UniqueName: \"kubernetes.io/projected/c5c96797-3987-489a-8ce2-510caa11262c-kube-api-access-b67jl\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.773851 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.773903 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.773945 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-config\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.774001 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.774078 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c5c96797-3987-489a-8ce2-510caa11262c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.774095 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc 
kubenswrapper[5003]: I0104 12:07:18.774151 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.775182 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.775556 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.776153 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-config\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.776466 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c5c96797-3987-489a-8ce2-510caa11262c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.778892 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.783694 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.787112 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.793842 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b67jl\" (UniqueName: \"kubernetes.io/projected/c5c96797-3987-489a-8ce2-510caa11262c-kube-api-access-b67jl\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " pod="openstack/ovsdbserver-nb-0"
Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.842670 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\")
" pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:18 crc kubenswrapper[5003]: I0104 12:07:18.938098 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 04 12:07:20 crc kubenswrapper[5003]: I0104 12:07:20.968212 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 04 12:07:20 crc kubenswrapper[5003]: I0104 12:07:20.969924 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:20 crc kubenswrapper[5003]: I0104 12:07:20.977284 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Jan 04 12:07:20 crc kubenswrapper[5003]: I0104 12:07:20.977357 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Jan 04 12:07:20 crc kubenswrapper[5003]: I0104 12:07:20.977405 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Jan 04 12:07:20 crc kubenswrapper[5003]: I0104 12:07:20.978938 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-686zz" Jan 04 12:07:20 crc kubenswrapper[5003]: I0104 12:07:20.986608 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.088001 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-config\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.088111 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.088155 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nffl6\" (UniqueName: \"kubernetes.io/projected/741d1d6f-4c11-4352-ba73-c8a13a465c95-kube-api-access-nffl6\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.088186 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.088243 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.088293 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.089655 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.089745 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.191322 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.191378 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.191414 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-config\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.191454 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.191503 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nffl6\" (UniqueName: \"kubernetes.io/projected/741d1d6f-4c11-4352-ba73-c8a13a465c95-kube-api-access-nffl6\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.191529 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.191579 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0" Jan 04 12:07:21 crc kubenswrapper[5003]: 
I0104 12:07:21.191598 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.191907 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.194826 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.195005 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.198281 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.199515 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.200316 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.206132 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-config\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.226200 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nffl6\" (UniqueName: \"kubernetes.io/projected/741d1d6f-4c11-4352-ba73-c8a13a465c95-kube-api-access-nffl6\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.232053 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"ovsdbserver-sb-0\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " pod="openstack/ovsdbserver-sb-0"
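
The run of reconciler entries above is the kubelet's normal volume lifecycle: VerifyControllerAttachedVolume, then "MountVolume started", then MountVolume.MountDevice / MountVolume.SetUp succeeded for every volume of ovsdbserver-nb-0 and ovsdbserver-sb-0 (the local-volume PVs surface at /mnt/openstack/pv06 and /mnt/openstack/pv07). A minimal sketch for tracing that lifecycle for one pod, assuming the journal has been saved to a file named kubelet.log (the filename is an assumption, not part of this capture):

    # Hypothetical filename; prints each volume-reconciler step for one pod, in order.
    grep 'pod="openstack/ovsdbserver-sb-0"' kubelet.log \
      | grep -E 'VerifyControllerAttachedVolume|MountVolume'
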
Jan 04 12:07:21 crc kubenswrapper[5003]: I0104 12:07:21.335832 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Jan 04 12:07:39 crc kubenswrapper[5003]: I0104 12:07:39.418504 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:07:39 crc kubenswrapper[5003]: I0104 12:07:39.419360 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:07:39 crc kubenswrapper[5003]: I0104 12:07:39.419420 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp"
Jan 04 12:07:39 crc kubenswrapper[5003]: I0104 12:07:39.420443 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8805b4e8959ecfe4ed6d4f63f07630a7e965c7249b0c281ceae8ee8943118856"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 04 12:07:39 crc kubenswrapper[5003]: I0104 12:07:39.420520 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://8805b4e8959ecfe4ed6d4f63f07630a7e965c7249b0c281ceae8ee8943118856" gracePeriod=600
Jan 04 12:07:39 crc kubenswrapper[5003]: I0104 12:07:39.842151 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="8805b4e8959ecfe4ed6d4f63f07630a7e965c7249b0c281ceae8ee8943118856" exitCode=0
Jan 04 12:07:39 crc kubenswrapper[5003]: I0104 12:07:39.842221 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"8805b4e8959ecfe4ed6d4f63f07630a7e965c7249b0c281ceae8ee8943118856"}
Jan 04 12:07:39 crc kubenswrapper[5003]: I0104 12:07:39.842841 5003 scope.go:117] "RemoveContainer" containerID="4118d04e178a916ef0fb795859c3a8da20b43a18e967d67161d5ece95b07366c"
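
The 12:07:39 block above is the standard liveness-failure path: patch_prober records the failed HTTP GET, SyncLoop flags the probe unhealthy, kuberuntime announces the restart and kills the container with gracePeriod=600, and the ContainerDied / RemoveContainer events are the old container being reaped. A sketch of re-running the same check by hand on the node, assuming curl is available there (an assumption; the endpoint is taken verbatim from the probe output above):

    # Re-issue the machine-config-daemon health check the kubelet attempted;
    # "connection refused" reproduces the failure logged above.
    curl -sS --max-time 5 http://127.0.0.1:8798/health
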
Jan 04 12:07:47 crc kubenswrapper[5003]: E0104 12:07:47.358216 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13"
Jan 04 12:07:47 crc kubenswrapper[5003]: E0104 12:07:47.358973 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-npvx4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(e9f73829-d0a1-4e4d-8f5a-755d63ce1caa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:07:47 crc kubenswrapper[5003]: E0104 12:07:47.360315 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa"
Jan 04 12:07:47 crc kubenswrapper[5003]: E0104 12:07:47.394652 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13"
Jan 04 12:07:47 crc kubenswrapper[5003]: E0104 12:07:47.394871 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[bash
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4rkz6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(30c47e0c-622e-4f66-a71d-f7e6cc0f23d9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:07:47 crc kubenswrapper[5003]: E0104 12:07:47.396892 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="30c47e0c-622e-4f66-a71d-f7e6cc0f23d9"
Jan 04 12:07:47 crc kubenswrapper[5003]: E0104 12:07:47.977427 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13\\\"\"" pod="openstack/openstack-galera-0" podUID="30c47e0c-622e-4f66-a71d-f7e6cc0f23d9"
Jan 04 12:07:47 crc kubenswrapper[5003]: E0104 12:07:47.977524 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa"
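
Each pull failure above follows the same progression: CRI-O aborts the copy ("context canceled"), kuberuntime surfaces it as ErrImagePull for the init container, and the next sync demotes the pod to ImagePullBackOff. One way to check whether the digest itself is reachable from the node, assuming podman is installed there (an assumption; the image reference is copied from the errors above):

    # Pull the exact digest the kubelet failed on; success suggests the earlier
    # cancellation was transient rather than a missing or broken image.
    podman pull quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13
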
image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d" Jan 04 12:07:52 crc kubenswrapper[5003]: E0104 12:07:52.952556 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vr8lh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(829003dc-aa5e-43a6-a4f5-c578c73e76d4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:07:52 crc kubenswrapper[5003]: E0104 12:07:52.953941 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="829003dc-aa5e-43a6-a4f5-c578c73e76d4" Jan 04 12:07:53 crc kubenswrapper[5003]: E0104 12:07:53.008691 5003 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="829003dc-aa5e-43a6-a4f5-c578c73e76d4" Jan 04 12:07:53 crc kubenswrapper[5003]: E0104 12:07:53.831814 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached@sha256:e47191ba776414b781b3e27b856ab45a03b9480c7dc2b1addb939608794882dc" Jan 04 12:07:53 crc kubenswrapper[5003]: E0104 12:07:53.832507 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached@sha256:e47191ba776414b781b3e27b856ab45a03b9480c7dc2b1addb939608794882dc,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n697hcfh5dfh5fdh58dh57bh78h65bh66chc7hffh5d7h557hb7h66dh55h78h577h5f9h64dh58fh58fhb8hdh664hdfh67ch5c7h65h5dfh55ch56fq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dmgvl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(274fbbf3-b927-408e-9594-946f6ea71638): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:07:53 crc kubenswrapper[5003]: E0104 12:07:53.833824 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="274fbbf3-b927-408e-9594-946f6ea71638"
Jan 04 12:07:53 crc kubenswrapper[5003]: E0104 12:07:53.847949 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d"
Jan 04 12:07:53 crc kubenswrapper[5003]: E0104 12:07:53.848805 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {}
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x6t42,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(81193935-fcd0-4877-9d65-6155c1a888e2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:07:53 crc kubenswrapper[5003]: E0104 12:07:53.850827 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="81193935-fcd0-4877-9d65-6155c1a888e2"
Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.018101 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:e733252aab7f4bc0efbdd712bcd88e44c5498bf1773dba843bc9dcfac324fe3d\\\"\"" pod="openstack/rabbitmq-server-0" podUID="81193935-fcd0-4877-9d65-6155c1a888e2"
Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.018376 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached@sha256:e47191ba776414b781b3e27b856ab45a03b9480c7dc2b1addb939608794882dc\\\"\"" pod="openstack/memcached-0" podUID="274fbbf3-b927-408e-9594-946f6ea71638"
Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.629228 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33"
Jan 04 12:07:54 crc kubenswrapper[5003]:
E0104 12:07:54.629414 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g46jr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-84bb9d8bd9-ngz7q_openstack(86288312-c398-4402-a6dc-1f2a2fd75e0d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.631337 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q" podUID="86288312-c398-4402-a6dc-1f2a2fd75e0d"
Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.656867 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33"
Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.656895 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33"
Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.656928 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled"
image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33" Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.657096 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gpnrm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5f854695bc-bgtgw_openstack(3f93d660-567b-4875-91f4-a2609d8a0ab8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.657166 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6g2fr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-744ffd65bc-w5hkf_openstack(1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.657452 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6bdxr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-95f5f6995-4fhgm_openstack(21282961-7d45-4f5f-8e71-b7d369b23110): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.658258 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" podUID="1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637"
Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.658316 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" podUID="3f93d660-567b-4875-91f4-a2609d8a0ab8"
Jan 04 12:07:54 crc kubenswrapper[5003]: E0104 12:07:54.659065 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" podUID="21282961-7d45-4f5f-8e71-b7d369b23110"
Jan 04 12:07:55 crc kubenswrapper[5003]: E0104 12:07:55.037130 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33\\\"\"" pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" podUID="1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637"
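
All three dnsmasq-dns spec dumps above carry the same init-container invocation as one unbroken Args string. Reflowed here for readability only (the flags are verbatim from the dumps; the $(POD_IP) form is Kubernetes env-var substitution resolved from status.podIP before the command runs, not shell command substitution):

    # As run via /bin/bash -c inside the init container (copied from the dumps above).
    dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d \
        --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug \
        --bind-interfaces --listen-address=$(POD_IP) --port 5353 \
        --log-facility=- --no-hosts --domain-needed --no-resolv \
        --bogus-priv --log-queries --test
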
ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33\\\"\"" pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" podUID="21282961-7d45-4f5f-8e71-b7d369b23110" Jan 04 12:07:55 crc kubenswrapper[5003]: E0104 12:07:55.169313 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71: Get \"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71\": context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb" Jan 04 12:07:55 crc kubenswrapper[5003]: E0104 12:07:55.169373 5003 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71: Get \"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71\": context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb" Jan 04 12:07:55 crc kubenswrapper[5003]: E0104 12:07:55.169525 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j4kj4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(df21c365-a2e2-4a85-8de8-f132fd605981): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71: Get \"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71\": context canceled" logger="UnhandledError"
Jan 04 12:07:55 crc kubenswrapper[5003]: E0104 12:07:55.170867 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71: Get \\\"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:9aee425378d2c16cd44177dc54a274b312897f5860a8e78fdfda555a0d79dd71\\\": context canceled\"" pod="openstack/kube-state-metrics-0" podUID="df21c365-a2e2-4a85-8de8-f132fd605981"
Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.238363 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fdswd"]
Jan 04 12:07:55 crc kubenswrapper[5003]: W0104 12:07:55.246442 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc88e1443_25c4_4e67_83d0_e43cef2b2e5c.slice/crio-de2220ff26a429e84b4767bccde7669818a28ec0b00af9fb7a2df1716de5644a WatchSource:0}: Error finding container de2220ff26a429e84b4767bccde7669818a28ec0b00af9fb7a2df1716de5644a: Status 404 returned error can't find the container with id de2220ff26a429e84b4767bccde7669818a28ec0b00af9fb7a2df1716de5644a
Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.299503 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-99mjg"]
Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.393387 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
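
The manager.go warning above ("Failed to process watch event ... Status 404") is the kubelet's cadvisor layer racing container creation: the crio-<id> cgroup appears before the container is registered, so the lookup 404s; when it is followed by ordinary SyncLoop UPDATE events, as here, it is typically harmless. A sketch for confirming the container did come up, assuming crictl is available on the node (an assumption; the id prefix is taken from the warning above):

    # List all containers and match the id cadvisor could not resolve at watch time.
    crictl ps -a | grep de2220ff26a4
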
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" Jan 04 12:07:55 crc kubenswrapper[5003]: W0104 12:07:55.447332 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod741d1d6f_4c11_4352_ba73_c8a13a465c95.slice/crio-868d00493b83018409f362ca624c82a60bd3be74e47068073991468ba8f540f7 WatchSource:0}: Error finding container 868d00493b83018409f362ca624c82a60bd3be74e47068073991468ba8f540f7: Status 404 returned error can't find the container with id 868d00493b83018409f362ca624c82a60bd3be74e47068073991468ba8f540f7 Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.449764 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.547673 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q" Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.636683 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpnrm\" (UniqueName: \"kubernetes.io/projected/3f93d660-567b-4875-91f4-a2609d8a0ab8-kube-api-access-gpnrm\") pod \"3f93d660-567b-4875-91f4-a2609d8a0ab8\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.636826 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-config\") pod \"3f93d660-567b-4875-91f4-a2609d8a0ab8\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.636925 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-dns-svc\") pod \"3f93d660-567b-4875-91f4-a2609d8a0ab8\" (UID: \"3f93d660-567b-4875-91f4-a2609d8a0ab8\") " Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.637695 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-config" (OuterVolumeSpecName: "config") pod "3f93d660-567b-4875-91f4-a2609d8a0ab8" (UID: "3f93d660-567b-4875-91f4-a2609d8a0ab8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.637728 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3f93d660-567b-4875-91f4-a2609d8a0ab8" (UID: "3f93d660-567b-4875-91f4-a2609d8a0ab8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.644590 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f93d660-567b-4875-91f4-a2609d8a0ab8-kube-api-access-gpnrm" (OuterVolumeSpecName: "kube-api-access-gpnrm") pod "3f93d660-567b-4875-91f4-a2609d8a0ab8" (UID: "3f93d660-567b-4875-91f4-a2609d8a0ab8"). InnerVolumeSpecName "kube-api-access-gpnrm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.747405 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g46jr\" (UniqueName: \"kubernetes.io/projected/86288312-c398-4402-a6dc-1f2a2fd75e0d-kube-api-access-g46jr\") pod \"86288312-c398-4402-a6dc-1f2a2fd75e0d\" (UID: \"86288312-c398-4402-a6dc-1f2a2fd75e0d\") " Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.747865 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86288312-c398-4402-a6dc-1f2a2fd75e0d-config\") pod \"86288312-c398-4402-a6dc-1f2a2fd75e0d\" (UID: \"86288312-c398-4402-a6dc-1f2a2fd75e0d\") " Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.748476 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86288312-c398-4402-a6dc-1f2a2fd75e0d-config" (OuterVolumeSpecName: "config") pod "86288312-c398-4402-a6dc-1f2a2fd75e0d" (UID: "86288312-c398-4402-a6dc-1f2a2fd75e0d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.748513 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpnrm\" (UniqueName: \"kubernetes.io/projected/3f93d660-567b-4875-91f4-a2609d8a0ab8-kube-api-access-gpnrm\") on node \"crc\" DevicePath \"\"" Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.748547 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.748574 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f93d660-567b-4875-91f4-a2609d8a0ab8-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.753976 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86288312-c398-4402-a6dc-1f2a2fd75e0d-kube-api-access-g46jr" (OuterVolumeSpecName: "kube-api-access-g46jr") pod "86288312-c398-4402-a6dc-1f2a2fd75e0d" (UID: "86288312-c398-4402-a6dc-1f2a2fd75e0d"). InnerVolumeSpecName "kube-api-access-g46jr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.850622 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86288312-c398-4402-a6dc-1f2a2fd75e0d-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:07:55 crc kubenswrapper[5003]: I0104 12:07:55.850655 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g46jr\" (UniqueName: \"kubernetes.io/projected/86288312-c398-4402-a6dc-1f2a2fd75e0d-kube-api-access-g46jr\") on node \"crc\" DevicePath \"\"" Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.060655 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" event={"ID":"3f93d660-567b-4875-91f4-a2609d8a0ab8","Type":"ContainerDied","Data":"36321367dccf32f3699b17b62ddbdfcbe59114d706ee113f468d465b2a3ab7b5"} Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.060840 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f854695bc-bgtgw" Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.062655 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"741d1d6f-4c11-4352-ba73-c8a13a465c95","Type":"ContainerStarted","Data":"868d00493b83018409f362ca624c82a60bd3be74e47068073991468ba8f540f7"} Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.067463 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"69783d40bb0d702bd7a771a35f8a0c04b3ee78e8c80ee725ec241cde3249b382"} Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.068438 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-99mjg" event={"ID":"d91cd6f4-0e52-4519-b337-9a7c2779b7f1","Type":"ContainerStarted","Data":"65eef8a07780b0e8e1ea149cf9c58ad22e62b06a8c2156fa8870f5b7decb667d"} Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.069403 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fdswd" event={"ID":"c88e1443-25c4-4e67-83d0-e43cef2b2e5c","Type":"ContainerStarted","Data":"de2220ff26a429e84b4767bccde7669818a28ec0b00af9fb7a2df1716de5644a"} Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.072722 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c5c96797-3987-489a-8ce2-510caa11262c","Type":"ContainerStarted","Data":"54fd4657c38950a0854eb471f11d031bb99b9b9b6c0705a5482cd75be4082fbc"} Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.077053 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q" Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.077430 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-84bb9d8bd9-ngz7q" event={"ID":"86288312-c398-4402-a6dc-1f2a2fd75e0d","Type":"ContainerDied","Data":"9769d0f8a19da603f4ee4f4cb7b6db26c3acbde7d2db0c48d543217ab32a86c5"} Jan 04 12:07:56 crc kubenswrapper[5003]: E0104 12:07:56.080566 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb\\\"\"" pod="openstack/kube-state-metrics-0" podUID="df21c365-a2e2-4a85-8de8-f132fd605981" Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.148314 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-bgtgw"] Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.153943 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f854695bc-bgtgw"] Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.185072 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"] Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.190747 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-84bb9d8bd9-ngz7q"] Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.826164 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f93d660-567b-4875-91f4-a2609d8a0ab8" path="/var/lib/kubelet/pods/3f93d660-567b-4875-91f4-a2609d8a0ab8/volumes" Jan 04 12:07:56 crc kubenswrapper[5003]: I0104 12:07:56.827341 
5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86288312-c398-4402-a6dc-1f2a2fd75e0d" path="/var/lib/kubelet/pods/86288312-c398-4402-a6dc-1f2a2fd75e0d/volumes" Jan 04 12:08:00 crc kubenswrapper[5003]: I0104 12:08:00.109140 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"741d1d6f-4c11-4352-ba73-c8a13a465c95","Type":"ContainerStarted","Data":"2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762"} Jan 04 12:08:00 crc kubenswrapper[5003]: I0104 12:08:00.112390 5003 generic.go:334] "Generic (PLEG): container finished" podID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerID="e03d33b1c2f582207ad79b8ffd1fb1bc6fb9b3a8c8fbdcaf0b8afe9429602a63" exitCode=0 Jan 04 12:08:00 crc kubenswrapper[5003]: I0104 12:08:00.112510 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-99mjg" event={"ID":"d91cd6f4-0e52-4519-b337-9a7c2779b7f1","Type":"ContainerDied","Data":"e03d33b1c2f582207ad79b8ffd1fb1bc6fb9b3a8c8fbdcaf0b8afe9429602a63"} Jan 04 12:08:00 crc kubenswrapper[5003]: I0104 12:08:00.115953 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fdswd" event={"ID":"c88e1443-25c4-4e67-83d0-e43cef2b2e5c","Type":"ContainerStarted","Data":"6388d2c6684a9b9e2ade7062fc176c21d7b16376b2604b8e0a56c525cd3036cb"} Jan 04 12:08:00 crc kubenswrapper[5003]: I0104 12:08:00.116136 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-fdswd" Jan 04 12:08:00 crc kubenswrapper[5003]: I0104 12:08:00.118204 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c5c96797-3987-489a-8ce2-510caa11262c","Type":"ContainerStarted","Data":"5bfd4afff231ef79e9cf1ed25e3e862879baafe9a1abee867c1c87bf017c640d"} Jan 04 12:08:00 crc kubenswrapper[5003]: I0104 12:08:00.156896 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-fdswd" podStartSLOduration=40.251831697 podStartE2EDuration="44.156877355s" podCreationTimestamp="2026-01-04 12:07:16 +0000 UTC" firstStartedPulling="2026-01-04 12:07:55.24945589 +0000 UTC m=+1190.722485731" lastFinishedPulling="2026-01-04 12:07:59.154501548 +0000 UTC m=+1194.627531389" observedRunningTime="2026-01-04 12:08:00.152365158 +0000 UTC m=+1195.625394999" watchObservedRunningTime="2026-01-04 12:08:00.156877355 +0000 UTC m=+1195.629907196" Jan 04 12:08:01 crc kubenswrapper[5003]: I0104 12:08:01.135110 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-99mjg" event={"ID":"d91cd6f4-0e52-4519-b337-9a7c2779b7f1","Type":"ContainerStarted","Data":"dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294"} Jan 04 12:08:01 crc kubenswrapper[5003]: I0104 12:08:01.135532 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-99mjg" event={"ID":"d91cd6f4-0e52-4519-b337-9a7c2779b7f1","Type":"ContainerStarted","Data":"4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3"} Jan 04 12:08:01 crc kubenswrapper[5003]: I0104 12:08:01.136500 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:08:01 crc kubenswrapper[5003]: I0104 12:08:01.136561 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:08:01 crc kubenswrapper[5003]: I0104 12:08:01.168249 5003 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/ovn-controller-ovs-99mjg" podStartSLOduration=40.350694553 podStartE2EDuration="44.168230945s" podCreationTimestamp="2026-01-04 12:07:17 +0000 UTC" firstStartedPulling="2026-01-04 12:07:55.337825399 +0000 UTC m=+1190.810855240" lastFinishedPulling="2026-01-04 12:07:59.155361791 +0000 UTC m=+1194.628391632" observedRunningTime="2026-01-04 12:08:01.163912063 +0000 UTC m=+1196.636941934" watchObservedRunningTime="2026-01-04 12:08:01.168230945 +0000 UTC m=+1196.641260786" Jan 04 12:08:03 crc kubenswrapper[5003]: I0104 12:08:03.153438 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"741d1d6f-4c11-4352-ba73-c8a13a465c95","Type":"ContainerStarted","Data":"a681cbc3fe9ff43a2d77746a80489f681d1a0e3c6c0988ff7acac4e32cd4e452"} Jan 04 12:08:03 crc kubenswrapper[5003]: I0104 12:08:03.158863 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c5c96797-3987-489a-8ce2-510caa11262c","Type":"ContainerStarted","Data":"94bf6e10e5febc0e0054c7eb1c6a15116c187661c05d7923c6e30f3126e4daba"} Jan 04 12:08:03 crc kubenswrapper[5003]: I0104 12:08:03.162280 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa","Type":"ContainerStarted","Data":"2a1ec64e5b0507e28d2e97d71cd1cc575cef24e7a7cb8c96be6fd1e98aabac73"} Jan 04 12:08:03 crc kubenswrapper[5003]: I0104 12:08:03.165121 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9","Type":"ContainerStarted","Data":"7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638"} Jan 04 12:08:03 crc kubenswrapper[5003]: I0104 12:08:03.183560 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=37.507284182 podStartE2EDuration="44.183542924s" podCreationTimestamp="2026-01-04 12:07:19 +0000 UTC" firstStartedPulling="2026-01-04 12:07:55.450443768 +0000 UTC m=+1190.923473609" lastFinishedPulling="2026-01-04 12:08:02.12670251 +0000 UTC m=+1197.599732351" observedRunningTime="2026-01-04 12:08:03.182879956 +0000 UTC m=+1198.655909817" watchObservedRunningTime="2026-01-04 12:08:03.183542924 +0000 UTC m=+1198.656572755" Jan 04 12:08:03 crc kubenswrapper[5003]: I0104 12:08:03.260085 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=39.49067686 podStartE2EDuration="46.260065444s" podCreationTimestamp="2026-01-04 12:07:17 +0000 UTC" firstStartedPulling="2026-01-04 12:07:55.399453222 +0000 UTC m=+1190.872483063" lastFinishedPulling="2026-01-04 12:08:02.168841766 +0000 UTC m=+1197.641871647" observedRunningTime="2026-01-04 12:08:03.252841977 +0000 UTC m=+1198.725871858" watchObservedRunningTime="2026-01-04 12:08:03.260065444 +0000 UTC m=+1198.733095285" Jan 04 12:08:03 crc kubenswrapper[5003]: I0104 12:08:03.336398 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Jan 04 12:08:03 crc kubenswrapper[5003]: I0104 12:08:03.382362 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Jan 04 12:08:03 crc kubenswrapper[5003]: I0104 12:08:03.939391 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Jan 04 12:08:03 crc kubenswrapper[5003]: I0104 12:08:03.939676 5003 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.042133 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.173416 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.207911 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.218117 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.471119 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-w5hkf"] Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.513536 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-whgpg"] Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.514931 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.519166 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.522684 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-whgpg"] Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.583920 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-ksmlf"] Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.585233 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.587896 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.598150 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ksmlf"] Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.662349 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-config\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.662397 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vx2xq\" (UniqueName: \"kubernetes.io/projected/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-kube-api-access-vx2xq\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.662429 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8w2d8\" (UniqueName: \"kubernetes.io/projected/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-kube-api-access-8w2d8\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.662631 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-combined-ca-bundle\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.662702 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-dns-svc\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.662741 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-ovsdbserver-sb\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.662787 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovn-rundir\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.662811 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovs-rundir\") pod 
\"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.662828 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.662854 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-config\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.706089 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4fhgm"] Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.749485 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.765524 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-combined-ca-bundle\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.765586 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-dns-svc\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.765617 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-ovsdbserver-sb\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.765662 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovn-rundir\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.765682 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovs-rundir\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.765702 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " 
pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.765724 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-config\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.765784 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-config\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.765803 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vx2xq\" (UniqueName: \"kubernetes.io/projected/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-kube-api-access-vx2xq\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.765846 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8w2d8\" (UniqueName: \"kubernetes.io/projected/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-kube-api-access-8w2d8\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.766394 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovs-rundir\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.767396 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-config\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.768434 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-dns-svc\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.768502 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovn-rundir\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.769761 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.778622 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.795372 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-combined-ca-bundle\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.800862 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-config\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.800935 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-rs66m"] Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.808271 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.808746 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.813269 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.813543 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.813839 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.815385 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-ovsdbserver-sb\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.815526 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-mvn8t" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.817766 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-rs66m"] Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.818966 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.821328 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.825452 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vx2xq\" (UniqueName: \"kubernetes.io/projected/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-kube-api-access-vx2xq\") pod \"ovn-controller-metrics-ksmlf\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.837200 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8w2d8\" (UniqueName: \"kubernetes.io/projected/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-kube-api-access-8w2d8\") pod \"dnsmasq-dns-5b79764b65-whgpg\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.841539 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.869274 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-config\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.869357 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.869384 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-config\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.869403 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8j8k\" (UniqueName: \"kubernetes.io/projected/653b2e11-95ca-46e7-b28c-a1170d7a180b-kube-api-access-j8j8k\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.869425 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.869440 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 
12:08:04.869462 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-scripts\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.869512 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.869543 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.869568 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-dns-svc\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.869590 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.869774 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vldrb\" (UniqueName: \"kubernetes.io/projected/967309ce-cd9e-4966-87fd-da0912bf701a-kube-api-access-vldrb\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.913526 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.976827 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-config\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.978590 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.978620 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-config\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.978638 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8j8k\" (UniqueName: \"kubernetes.io/projected/653b2e11-95ca-46e7-b28c-a1170d7a180b-kube-api-access-j8j8k\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.978776 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.978797 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.978828 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-scripts\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.978874 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.978919 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.978943 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-dns-svc\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.978965 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.979028 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vldrb\" (UniqueName: \"kubernetes.io/projected/967309ce-cd9e-4966-87fd-da0912bf701a-kube-api-access-vldrb\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.980413 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-sb\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.980413 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-dns-svc\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.980964 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-scripts\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.981600 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-config\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.982302 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.984665 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-config\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.986365 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.986617 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.987774 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:04 crc kubenswrapper[5003]: I0104 12:08:04.989783 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-nb\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.004372 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vldrb\" (UniqueName: \"kubernetes.io/projected/967309ce-cd9e-4966-87fd-da0912bf701a-kube-api-access-vldrb\") pod \"dnsmasq-dns-586b989cdc-rs66m\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.005485 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8j8k\" (UniqueName: \"kubernetes.io/projected/653b2e11-95ca-46e7-b28c-a1170d7a180b-kube-api-access-j8j8k\") pod \"ovn-northd-0\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " pod="openstack/ovn-northd-0" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.140455 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.154920 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.165635 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.178565 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.179088 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.209997 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" event={"ID":"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637","Type":"ContainerDied","Data":"5e1ebd688a3fc2693195c838e30757f2197aa65b6ac9170ac71d80e31a225d78"} Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.210104 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-744ffd65bc-w5hkf" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.217403 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" event={"ID":"21282961-7d45-4f5f-8e71-b7d369b23110","Type":"ContainerDied","Data":"9588bd1b50e0ebb9403c69b0700e74b08fb9ad421af9dee5d7a09eaa4bc31175"} Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.217585 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-95f5f6995-4fhgm" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.310061 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-dns-svc\") pod \"21282961-7d45-4f5f-8e71-b7d369b23110\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.310208 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-config\") pod \"21282961-7d45-4f5f-8e71-b7d369b23110\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.310264 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-config\") pod \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.310280 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-dns-svc\") pod \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.310352 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g2fr\" (UniqueName: \"kubernetes.io/projected/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-kube-api-access-6g2fr\") pod \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\" (UID: \"1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637\") " Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.310381 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bdxr\" (UniqueName: \"kubernetes.io/projected/21282961-7d45-4f5f-8e71-b7d369b23110-kube-api-access-6bdxr\") pod \"21282961-7d45-4f5f-8e71-b7d369b23110\" (UID: \"21282961-7d45-4f5f-8e71-b7d369b23110\") " Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.310628 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "21282961-7d45-4f5f-8e71-b7d369b23110" (UID: "21282961-7d45-4f5f-8e71-b7d369b23110"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.311257 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.311627 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-config" (OuterVolumeSpecName: "config") pod "21282961-7d45-4f5f-8e71-b7d369b23110" (UID: "21282961-7d45-4f5f-8e71-b7d369b23110"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.312460 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-config" (OuterVolumeSpecName: "config") pod "1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637" (UID: "1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.312813 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637" (UID: "1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.414872 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21282961-7d45-4f5f-8e71-b7d369b23110-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.415268 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.415282 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.473049 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-kube-api-access-6g2fr" (OuterVolumeSpecName: "kube-api-access-6g2fr") pod "1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637" (UID: "1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637"). InnerVolumeSpecName "kube-api-access-6g2fr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.473621 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21282961-7d45-4f5f-8e71-b7d369b23110-kube-api-access-6bdxr" (OuterVolumeSpecName: "kube-api-access-6bdxr") pod "21282961-7d45-4f5f-8e71-b7d369b23110" (UID: "21282961-7d45-4f5f-8e71-b7d369b23110"). InnerVolumeSpecName "kube-api-access-6bdxr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.517535 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g2fr\" (UniqueName: \"kubernetes.io/projected/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637-kube-api-access-6g2fr\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.517561 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bdxr\" (UniqueName: \"kubernetes.io/projected/21282961-7d45-4f5f-8e71-b7d369b23110-kube-api-access-6bdxr\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.519367 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ksmlf"] Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.653651 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-w5hkf"] Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.660132 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-744ffd65bc-w5hkf"] Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.676275 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4fhgm"] Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.695981 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-95f5f6995-4fhgm"] Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.708405 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-whgpg"] Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.791306 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-rs66m"] Jan 04 12:08:05 crc kubenswrapper[5003]: W0104 12:08:05.791742 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod653b2e11_95ca_46e7_b28c_a1170d7a180b.slice/crio-e3265df4c8826b4c9a574f0a412029cd57e6e0130fed8c6aa5ff21f2e9f8a346 WatchSource:0}: Error finding container e3265df4c8826b4c9a574f0a412029cd57e6e0130fed8c6aa5ff21f2e9f8a346: Status 404 returned error can't find the container with id e3265df4c8826b4c9a574f0a412029cd57e6e0130fed8c6aa5ff21f2e9f8a346 Jan 04 12:08:05 crc kubenswrapper[5003]: W0104 12:08:05.794424 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod967309ce_cd9e_4966_87fd_da0912bf701a.slice/crio-f10b995e43eec18faba6a9bc483cf5b19d2ebd3ca43784d911b6fc6f905d2947 WatchSource:0}: Error finding container f10b995e43eec18faba6a9bc483cf5b19d2ebd3ca43784d911b6fc6f905d2947: Status 404 returned error can't find the container with id f10b995e43eec18faba6a9bc483cf5b19d2ebd3ca43784d911b6fc6f905d2947 Jan 04 12:08:05 crc kubenswrapper[5003]: I0104 12:08:05.797354 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Jan 04 12:08:06 crc kubenswrapper[5003]: I0104 12:08:06.226667 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"653b2e11-95ca-46e7-b28c-a1170d7a180b","Type":"ContainerStarted","Data":"e3265df4c8826b4c9a574f0a412029cd57e6e0130fed8c6aa5ff21f2e9f8a346"} Jan 04 12:08:06 crc kubenswrapper[5003]: I0104 12:08:06.229478 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ksmlf" 
event={"ID":"988b3ee2-147b-4ebc-9d31-42bdaf144bc5","Type":"ContainerStarted","Data":"2de21e3795685f92c0995b8c0275e374995fb3d1feddd1208205a49fb45022bc"} Jan 04 12:08:06 crc kubenswrapper[5003]: I0104 12:08:06.229543 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ksmlf" event={"ID":"988b3ee2-147b-4ebc-9d31-42bdaf144bc5","Type":"ContainerStarted","Data":"24f90983c5e9855fd85fd4d036576f9825383efadffd1d6553f46bd882adf303"} Jan 04 12:08:06 crc kubenswrapper[5003]: I0104 12:08:06.230981 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" event={"ID":"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53","Type":"ContainerStarted","Data":"1629da10bdee237e178d3f310f7a71a18c96f292b3d223228122a16a642afbc0"} Jan 04 12:08:06 crc kubenswrapper[5003]: I0104 12:08:06.233356 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"829003dc-aa5e-43a6-a4f5-c578c73e76d4","Type":"ContainerStarted","Data":"6364cbec859dec141ca449b3d978906aef35d877a78403265122c821233736ff"} Jan 04 12:08:06 crc kubenswrapper[5003]: I0104 12:08:06.234876 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" event={"ID":"967309ce-cd9e-4966-87fd-da0912bf701a","Type":"ContainerStarted","Data":"f10b995e43eec18faba6a9bc483cf5b19d2ebd3ca43784d911b6fc6f905d2947"} Jan 04 12:08:06 crc kubenswrapper[5003]: I0104 12:08:06.263577 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-ksmlf" podStartSLOduration=2.263554909 podStartE2EDuration="2.263554909s" podCreationTimestamp="2026-01-04 12:08:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:06.249757591 +0000 UTC m=+1201.722787452" watchObservedRunningTime="2026-01-04 12:08:06.263554909 +0000 UTC m=+1201.736584750" Jan 04 12:08:06 crc kubenswrapper[5003]: I0104 12:08:06.817537 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637" path="/var/lib/kubelet/pods/1a8e36a1-3fce-46ab-8e94-c0c8d7e3f637/volumes" Jan 04 12:08:06 crc kubenswrapper[5003]: I0104 12:08:06.817903 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21282961-7d45-4f5f-8e71-b7d369b23110" path="/var/lib/kubelet/pods/21282961-7d45-4f5f-8e71-b7d369b23110/volumes" Jan 04 12:08:07 crc kubenswrapper[5003]: I0104 12:08:07.260448 5003 generic.go:334] "Generic (PLEG): container finished" podID="f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" containerID="284b00fcc022d84ab86dcabf11926ac9c152de2258492ec57e76596238fdc3e2" exitCode=0 Jan 04 12:08:07 crc kubenswrapper[5003]: I0104 12:08:07.260850 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" event={"ID":"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53","Type":"ContainerDied","Data":"284b00fcc022d84ab86dcabf11926ac9c152de2258492ec57e76596238fdc3e2"} Jan 04 12:08:07 crc kubenswrapper[5003]: I0104 12:08:07.264254 5003 generic.go:334] "Generic (PLEG): container finished" podID="967309ce-cd9e-4966-87fd-da0912bf701a" containerID="2b20df2efc15be43b6d5466b005754a98c8743980da63fdc96b9de64d7424a8d" exitCode=0 Jan 04 12:08:07 crc kubenswrapper[5003]: I0104 12:08:07.264359 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" 
event={"ID":"967309ce-cd9e-4966-87fd-da0912bf701a","Type":"ContainerDied","Data":"2b20df2efc15be43b6d5466b005754a98c8743980da63fdc96b9de64d7424a8d"} Jan 04 12:08:07 crc kubenswrapper[5003]: I0104 12:08:07.266402 5003 generic.go:334] "Generic (PLEG): container finished" podID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" containerID="2a1ec64e5b0507e28d2e97d71cd1cc575cef24e7a7cb8c96be6fd1e98aabac73" exitCode=0 Jan 04 12:08:07 crc kubenswrapper[5003]: I0104 12:08:07.266523 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa","Type":"ContainerDied","Data":"2a1ec64e5b0507e28d2e97d71cd1cc575cef24e7a7cb8c96be6fd1e98aabac73"} Jan 04 12:08:07 crc kubenswrapper[5003]: I0104 12:08:07.270080 5003 generic.go:334] "Generic (PLEG): container finished" podID="30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" containerID="7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638" exitCode=0 Jan 04 12:08:07 crc kubenswrapper[5003]: I0104 12:08:07.271222 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9","Type":"ContainerDied","Data":"7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638"} Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.278669 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"653b2e11-95ca-46e7-b28c-a1170d7a180b","Type":"ContainerStarted","Data":"455bff1469c5fdbae60e33d2f9fdcc36a7531d6f5b1512eba0641e8420891546"} Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.279112 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"653b2e11-95ca-46e7-b28c-a1170d7a180b","Type":"ContainerStarted","Data":"3e03c9994a07fa7e18a42bd5c17e591979717aa51ecb5df6f1ce553e5a854fff"} Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.283165 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.294433 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" event={"ID":"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53","Type":"ContainerStarted","Data":"0bf66e149bc0a07d7756df872b567c84a6ba476cd3b14d06679d7798ee524ee8"} Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.294924 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.300049 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" event={"ID":"967309ce-cd9e-4966-87fd-da0912bf701a","Type":"ContainerStarted","Data":"be0658409d3952a3d07b499b69dcd21fb8f8c24b2432df277ce2cdc4c6694340"} Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.300699 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.303534 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa","Type":"ContainerStarted","Data":"7b0afbed514d598037465207941500c8d671c3e849c508673ddb681f2032cb98"} Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.306107 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" 
event={"ID":"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9","Type":"ContainerStarted","Data":"c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368"} Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.322184 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.897380498 podStartE2EDuration="4.322150123s" podCreationTimestamp="2026-01-04 12:08:04 +0000 UTC" firstStartedPulling="2026-01-04 12:08:05.793725267 +0000 UTC m=+1201.266755108" lastFinishedPulling="2026-01-04 12:08:07.218494892 +0000 UTC m=+1202.691524733" observedRunningTime="2026-01-04 12:08:08.309766651 +0000 UTC m=+1203.782796582" watchObservedRunningTime="2026-01-04 12:08:08.322150123 +0000 UTC m=+1203.795179974" Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.342164 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.170975654 podStartE2EDuration="59.342145073s" podCreationTimestamp="2026-01-04 12:07:09 +0000 UTC" firstStartedPulling="2026-01-04 12:07:11.000810589 +0000 UTC m=+1146.473840430" lastFinishedPulling="2026-01-04 12:08:02.171979998 +0000 UTC m=+1197.645009849" observedRunningTime="2026-01-04 12:08:08.334551145 +0000 UTC m=+1203.807580986" watchObservedRunningTime="2026-01-04 12:08:08.342145073 +0000 UTC m=+1203.815174914" Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.358163 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" podStartSLOduration=3.8908486829999998 podStartE2EDuration="4.358137949s" podCreationTimestamp="2026-01-04 12:08:04 +0000 UTC" firstStartedPulling="2026-01-04 12:08:05.797369942 +0000 UTC m=+1201.270399783" lastFinishedPulling="2026-01-04 12:08:06.264659198 +0000 UTC m=+1201.737689049" observedRunningTime="2026-01-04 12:08:08.356593509 +0000 UTC m=+1203.829623350" watchObservedRunningTime="2026-01-04 12:08:08.358137949 +0000 UTC m=+1203.831167810" Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.380221 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" podStartSLOduration=3.867163446 podStartE2EDuration="4.380196783s" podCreationTimestamp="2026-01-04 12:08:04 +0000 UTC" firstStartedPulling="2026-01-04 12:08:05.714354232 +0000 UTC m=+1201.187384073" lastFinishedPulling="2026-01-04 12:08:06.227387569 +0000 UTC m=+1201.700417410" observedRunningTime="2026-01-04 12:08:08.374341201 +0000 UTC m=+1203.847371132" watchObservedRunningTime="2026-01-04 12:08:08.380196783 +0000 UTC m=+1203.853226634" Jan 04 12:08:08 crc kubenswrapper[5003]: I0104 12:08:08.406872 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=9.371430278 podStartE2EDuration="58.406844256s" podCreationTimestamp="2026-01-04 12:07:10 +0000 UTC" firstStartedPulling="2026-01-04 12:07:13.470525208 +0000 UTC m=+1148.943555049" lastFinishedPulling="2026-01-04 12:08:02.505939186 +0000 UTC m=+1197.978969027" observedRunningTime="2026-01-04 12:08:08.395766268 +0000 UTC m=+1203.868796169" watchObservedRunningTime="2026-01-04 12:08:08.406844256 +0000 UTC m=+1203.879874137" Jan 04 12:08:10 crc kubenswrapper[5003]: I0104 12:08:10.326195 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" 
event={"ID":"274fbbf3-b927-408e-9594-946f6ea71638","Type":"ContainerStarted","Data":"7141902b674c0aba97218ef22d6317c2792d0dbefb479c305e2f864785706754"} Jan 04 12:08:10 crc kubenswrapper[5003]: I0104 12:08:10.327081 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 04 12:08:10 crc kubenswrapper[5003]: I0104 12:08:10.329055 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"81193935-fcd0-4877-9d65-6155c1a888e2","Type":"ContainerStarted","Data":"13206dc80be6f8795f671b42dac3396c5e445e376c8796f74fffdbfb54487a41"} Jan 04 12:08:10 crc kubenswrapper[5003]: I0104 12:08:10.350240 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.567939058 podStartE2EDuration="58.350211513s" podCreationTimestamp="2026-01-04 12:07:12 +0000 UTC" firstStartedPulling="2026-01-04 12:07:13.463207808 +0000 UTC m=+1148.936237649" lastFinishedPulling="2026-01-04 12:08:09.245480263 +0000 UTC m=+1204.718510104" observedRunningTime="2026-01-04 12:08:10.347777039 +0000 UTC m=+1205.820806880" watchObservedRunningTime="2026-01-04 12:08:10.350211513 +0000 UTC m=+1205.823241374" Jan 04 12:08:10 crc kubenswrapper[5003]: I0104 12:08:10.481707 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 04 12:08:10 crc kubenswrapper[5003]: I0104 12:08:10.481750 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 04 12:08:12 crc kubenswrapper[5003]: E0104 12:08:12.042760 5003 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.5:59854->38.102.83.5:33783: write tcp 38.102.83.5:59854->38.102.83.5:33783: write: broken pipe Jan 04 12:08:12 crc kubenswrapper[5003]: I0104 12:08:12.245930 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 04 12:08:12 crc kubenswrapper[5003]: I0104 12:08:12.246302 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 04 12:08:12 crc kubenswrapper[5003]: I0104 12:08:12.345952 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 04 12:08:12 crc kubenswrapper[5003]: I0104 12:08:12.437990 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 04 12:08:15 crc kubenswrapper[5003]: I0104 12:08:15.143863 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:15 crc kubenswrapper[5003]: I0104 12:08:15.167227 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:15 crc kubenswrapper[5003]: I0104 12:08:15.249223 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-whgpg"] Jan 04 12:08:15 crc kubenswrapper[5003]: I0104 12:08:15.367049 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" podUID="f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" containerName="dnsmasq-dns" containerID="cri-o://0bf66e149bc0a07d7756df872b567c84a6ba476cd3b14d06679d7798ee524ee8" gracePeriod=10 Jan 04 12:08:17 crc kubenswrapper[5003]: I0104 12:08:17.382220 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/kube-state-metrics-0" event={"ID":"df21c365-a2e2-4a85-8de8-f132fd605981","Type":"ContainerStarted","Data":"984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf"} Jan 04 12:08:17 crc kubenswrapper[5003]: I0104 12:08:17.586766 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.141359 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" podUID="f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: connect: connection refused" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.369436 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.514984 5003 generic.go:334] "Generic (PLEG): container finished" podID="f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" containerID="0bf66e149bc0a07d7756df872b567c84a6ba476cd3b14d06679d7798ee524ee8" exitCode=0 Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.515164 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" event={"ID":"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53","Type":"ContainerDied","Data":"0bf66e149bc0a07d7756df872b567c84a6ba476cd3b14d06679d7798ee524ee8"} Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.685699 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-77cfc"] Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.686766 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-77cfc" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.688868 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.695412 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-77cfc"] Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.702899 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.776775 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-dns-svc\") pod \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.776910 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8w2d8\" (UniqueName: \"kubernetes.io/projected/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-kube-api-access-8w2d8\") pod \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.776973 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-ovsdbserver-sb\") pod \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.776998 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-config\") pod \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\" (UID: \"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53\") " Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.777766 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmj7s\" (UniqueName: \"kubernetes.io/projected/70fa4b5a-f151-4372-b6e5-de3d360e6734-kube-api-access-mmj7s\") pod \"root-account-create-update-77cfc\" (UID: \"70fa4b5a-f151-4372-b6e5-de3d360e6734\") " pod="openstack/root-account-create-update-77cfc" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.777997 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70fa4b5a-f151-4372-b6e5-de3d360e6734-operator-scripts\") pod \"root-account-create-update-77cfc\" (UID: \"70fa4b5a-f151-4372-b6e5-de3d360e6734\") " pod="openstack/root-account-create-update-77cfc" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.787328 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-kube-api-access-8w2d8" (OuterVolumeSpecName: "kube-api-access-8w2d8") pod "f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" (UID: "f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53"). InnerVolumeSpecName "kube-api-access-8w2d8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.814324 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" (UID: "f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53"). InnerVolumeSpecName "ovsdbserver-sb". 
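The "Probe failed" line above is a TCP readiness probe: the prober simply attempts a TCP connect to the pod IP and port and, on failure, reports the dial error verbatim, which is where "dial tcp 10.217.0.113:5353: connect: connection refused" comes from. A minimal stand-alone sketch of that check (the address and timeout here are illustrative, taken from the log):

// tcp_probe.go: the shape of a TCP-socket readiness check. A successful
// connect (followed by an immediate close) is all such a probe verifies.
package main

import (
	"fmt"
	"net"
	"time"
)

func probeTCP(addr string, timeout time.Duration) error {
	c, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return err // e.g. "dial tcp 10.217.0.113:5353: connect: connection refused"
	}
	return c.Close()
}

func main() {
	if err := probeTCP("10.217.0.113:5353", time.Second); err != nil {
		fmt.Println("probe failed:", err)
	}
}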
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.817998 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-config" (OuterVolumeSpecName: "config") pod "f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" (UID: "f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.826954 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" (UID: "f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.879463 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70fa4b5a-f151-4372-b6e5-de3d360e6734-operator-scripts\") pod \"root-account-create-update-77cfc\" (UID: \"70fa4b5a-f151-4372-b6e5-de3d360e6734\") " pod="openstack/root-account-create-update-77cfc" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.880356 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmj7s\" (UniqueName: \"kubernetes.io/projected/70fa4b5a-f151-4372-b6e5-de3d360e6734-kube-api-access-mmj7s\") pod \"root-account-create-update-77cfc\" (UID: \"70fa4b5a-f151-4372-b6e5-de3d360e6734\") " pod="openstack/root-account-create-update-77cfc" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.880587 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.880608 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8w2d8\" (UniqueName: \"kubernetes.io/projected/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-kube-api-access-8w2d8\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.880621 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.880637 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.880865 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70fa4b5a-f151-4372-b6e5-de3d360e6734-operator-scripts\") pod \"root-account-create-update-77cfc\" (UID: \"70fa4b5a-f151-4372-b6e5-de3d360e6734\") " pod="openstack/root-account-create-update-77cfc" Jan 04 12:08:20 crc kubenswrapper[5003]: I0104 12:08:20.896875 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmj7s\" (UniqueName: \"kubernetes.io/projected/70fa4b5a-f151-4372-b6e5-de3d360e6734-kube-api-access-mmj7s\") pod \"root-account-create-update-77cfc\" (UID: \"70fa4b5a-f151-4372-b6e5-de3d360e6734\") " 
pod="openstack/root-account-create-update-77cfc" Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.020133 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-77cfc" Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.493540 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-77cfc"] Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.524891 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-77cfc" event={"ID":"70fa4b5a-f151-4372-b6e5-de3d360e6734","Type":"ContainerStarted","Data":"b3b6af67ca9877198718a2df59d065f1f51cb92fdd12936d42b2ba9560722ed2"} Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.528455 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.528487 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b79764b65-whgpg" event={"ID":"f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53","Type":"ContainerDied","Data":"1629da10bdee237e178d3f310f7a71a18c96f292b3d223228122a16a642afbc0"} Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.528515 5003 scope.go:117] "RemoveContainer" containerID="0bf66e149bc0a07d7756df872b567c84a6ba476cd3b14d06679d7798ee524ee8" Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.529107 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.548417 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=11.482866341 podStartE2EDuration="1m8.548397331s" podCreationTimestamp="2026-01-04 12:07:13 +0000 UTC" firstStartedPulling="2026-01-04 12:07:15.039207616 +0000 UTC m=+1150.512237457" lastFinishedPulling="2026-01-04 12:08:12.104738606 +0000 UTC m=+1207.577768447" observedRunningTime="2026-01-04 12:08:21.540916816 +0000 UTC m=+1217.013946657" watchObservedRunningTime="2026-01-04 12:08:21.548397331 +0000 UTC m=+1217.021427172" Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.569343 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-whgpg"] Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.572561 5003 scope.go:117] "RemoveContainer" containerID="284b00fcc022d84ab86dcabf11926ac9c152de2258492ec57e76596238fdc3e2" Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.576332 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b79764b65-whgpg"] Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.730063 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 04 12:08:21 crc kubenswrapper[5003]: I0104 12:08:21.818077 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.283633 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-55ncj"] Jan 04 12:08:22 crc kubenswrapper[5003]: E0104 12:08:22.284346 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" containerName="init" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.284360 5003 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" containerName="init" Jan 04 12:08:22 crc kubenswrapper[5003]: E0104 12:08:22.284388 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" containerName="dnsmasq-dns" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.284394 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" containerName="dnsmasq-dns" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.284548 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" containerName="dnsmasq-dns" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.285152 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-55ncj" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.294589 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-55ncj"] Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.322453 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-e45f-account-create-update-llf8z"] Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.338601 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e45f-account-create-update-llf8z" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.342296 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.345317 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e45f-account-create-update-llf8z"] Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.416120 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/346862a9-1434-4cd7-bd85-1bbf1c02431d-operator-scripts\") pod \"placement-e45f-account-create-update-llf8z\" (UID: \"346862a9-1434-4cd7-bd85-1bbf1c02431d\") " pod="openstack/placement-e45f-account-create-update-llf8z" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.416189 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg5gt\" (UniqueName: \"kubernetes.io/projected/346862a9-1434-4cd7-bd85-1bbf1c02431d-kube-api-access-sg5gt\") pod \"placement-e45f-account-create-update-llf8z\" (UID: \"346862a9-1434-4cd7-bd85-1bbf1c02431d\") " pod="openstack/placement-e45f-account-create-update-llf8z" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.416276 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af87a6b9-8483-47b6-a458-4cde08f820ab-operator-scripts\") pod \"placement-db-create-55ncj\" (UID: \"af87a6b9-8483-47b6-a458-4cde08f820ab\") " pod="openstack/placement-db-create-55ncj" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.416303 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46dgn\" (UniqueName: \"kubernetes.io/projected/af87a6b9-8483-47b6-a458-4cde08f820ab-kube-api-access-46dgn\") pod \"placement-db-create-55ncj\" (UID: \"af87a6b9-8483-47b6-a458-4cde08f820ab\") " pod="openstack/placement-db-create-55ncj" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.517966 5003 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/346862a9-1434-4cd7-bd85-1bbf1c02431d-operator-scripts\") pod \"placement-e45f-account-create-update-llf8z\" (UID: \"346862a9-1434-4cd7-bd85-1bbf1c02431d\") " pod="openstack/placement-e45f-account-create-update-llf8z" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.518130 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg5gt\" (UniqueName: \"kubernetes.io/projected/346862a9-1434-4cd7-bd85-1bbf1c02431d-kube-api-access-sg5gt\") pod \"placement-e45f-account-create-update-llf8z\" (UID: \"346862a9-1434-4cd7-bd85-1bbf1c02431d\") " pod="openstack/placement-e45f-account-create-update-llf8z" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.518182 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af87a6b9-8483-47b6-a458-4cde08f820ab-operator-scripts\") pod \"placement-db-create-55ncj\" (UID: \"af87a6b9-8483-47b6-a458-4cde08f820ab\") " pod="openstack/placement-db-create-55ncj" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.518234 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46dgn\" (UniqueName: \"kubernetes.io/projected/af87a6b9-8483-47b6-a458-4cde08f820ab-kube-api-access-46dgn\") pod \"placement-db-create-55ncj\" (UID: \"af87a6b9-8483-47b6-a458-4cde08f820ab\") " pod="openstack/placement-db-create-55ncj" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.519169 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af87a6b9-8483-47b6-a458-4cde08f820ab-operator-scripts\") pod \"placement-db-create-55ncj\" (UID: \"af87a6b9-8483-47b6-a458-4cde08f820ab\") " pod="openstack/placement-db-create-55ncj" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.519368 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/346862a9-1434-4cd7-bd85-1bbf1c02431d-operator-scripts\") pod \"placement-e45f-account-create-update-llf8z\" (UID: \"346862a9-1434-4cd7-bd85-1bbf1c02431d\") " pod="openstack/placement-e45f-account-create-update-llf8z" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.539770 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg5gt\" (UniqueName: \"kubernetes.io/projected/346862a9-1434-4cd7-bd85-1bbf1c02431d-kube-api-access-sg5gt\") pod \"placement-e45f-account-create-update-llf8z\" (UID: \"346862a9-1434-4cd7-bd85-1bbf1c02431d\") " pod="openstack/placement-e45f-account-create-update-llf8z" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.544645 5003 generic.go:334] "Generic (PLEG): container finished" podID="70fa4b5a-f151-4372-b6e5-de3d360e6734" containerID="8005247d3ecf421639fe3cce12ad4b6ea9304418c4fac60baaa23389f71401c7" exitCode=0 Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.544731 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-77cfc" event={"ID":"70fa4b5a-f151-4372-b6e5-de3d360e6734","Type":"ContainerDied","Data":"8005247d3ecf421639fe3cce12ad4b6ea9304418c4fac60baaa23389f71401c7"} Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.544657 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46dgn\" (UniqueName: 
\"kubernetes.io/projected/af87a6b9-8483-47b6-a458-4cde08f820ab-kube-api-access-46dgn\") pod \"placement-db-create-55ncj\" (UID: \"af87a6b9-8483-47b6-a458-4cde08f820ab\") " pod="openstack/placement-db-create-55ncj" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.552257 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.633225 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-55ncj" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.666894 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e45f-account-create-update-llf8z" Jan 04 12:08:22 crc kubenswrapper[5003]: I0104 12:08:22.817627 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53" path="/var/lib/kubelet/pods/f3be3e7d-4c26-4e6f-a620-ccc27e2a4e53/volumes" Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.102435 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e45f-account-create-update-llf8z"] Jan 04 12:08:23 crc kubenswrapper[5003]: W0104 12:08:23.103224 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod346862a9_1434_4cd7_bd85_1bbf1c02431d.slice/crio-cc06e3a445c1e3e1437655661fdacbc23642d92cc227948d2f7eda49344438e6 WatchSource:0}: Error finding container cc06e3a445c1e3e1437655661fdacbc23642d92cc227948d2f7eda49344438e6: Status 404 returned error can't find the container with id cc06e3a445c1e3e1437655661fdacbc23642d92cc227948d2f7eda49344438e6 Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.166400 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-55ncj"] Jan 04 12:08:23 crc kubenswrapper[5003]: W0104 12:08:23.172318 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf87a6b9_8483_47b6_a458_4cde08f820ab.slice/crio-56e25974a35cdd56036c30565c310c7213d099694632300c26e2e6d4edc48246 WatchSource:0}: Error finding container 56e25974a35cdd56036c30565c310c7213d099694632300c26e2e6d4edc48246: Status 404 returned error can't find the container with id 56e25974a35cdd56036c30565c310c7213d099694632300c26e2e6d4edc48246 Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.555635 5003 generic.go:334] "Generic (PLEG): container finished" podID="af87a6b9-8483-47b6-a458-4cde08f820ab" containerID="518da8e529d9f48a5912c26b7dbf54ba4ec156e0b77d21fc2b8ed1024cf45d99" exitCode=0 Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.555749 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-55ncj" event={"ID":"af87a6b9-8483-47b6-a458-4cde08f820ab","Type":"ContainerDied","Data":"518da8e529d9f48a5912c26b7dbf54ba4ec156e0b77d21fc2b8ed1024cf45d99"} Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.556193 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-55ncj" event={"ID":"af87a6b9-8483-47b6-a458-4cde08f820ab","Type":"ContainerStarted","Data":"56e25974a35cdd56036c30565c310c7213d099694632300c26e2e6d4edc48246"} Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.557688 5003 generic.go:334] "Generic (PLEG): container finished" podID="346862a9-1434-4cd7-bd85-1bbf1c02431d" 
containerID="ec2ccb2a7819df6465119db8e29c6514123f5c5f6fac83c9f35268a3120edafb" exitCode=0 Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.557702 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e45f-account-create-update-llf8z" event={"ID":"346862a9-1434-4cd7-bd85-1bbf1c02431d","Type":"ContainerDied","Data":"ec2ccb2a7819df6465119db8e29c6514123f5c5f6fac83c9f35268a3120edafb"} Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.557777 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e45f-account-create-update-llf8z" event={"ID":"346862a9-1434-4cd7-bd85-1bbf1c02431d","Type":"ContainerStarted","Data":"cc06e3a445c1e3e1437655661fdacbc23642d92cc227948d2f7eda49344438e6"} Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.881692 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-77cfc" Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.943428 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70fa4b5a-f151-4372-b6e5-de3d360e6734-operator-scripts\") pod \"70fa4b5a-f151-4372-b6e5-de3d360e6734\" (UID: \"70fa4b5a-f151-4372-b6e5-de3d360e6734\") " Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.943516 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmj7s\" (UniqueName: \"kubernetes.io/projected/70fa4b5a-f151-4372-b6e5-de3d360e6734-kube-api-access-mmj7s\") pod \"70fa4b5a-f151-4372-b6e5-de3d360e6734\" (UID: \"70fa4b5a-f151-4372-b6e5-de3d360e6734\") " Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.944276 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70fa4b5a-f151-4372-b6e5-de3d360e6734-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "70fa4b5a-f151-4372-b6e5-de3d360e6734" (UID: "70fa4b5a-f151-4372-b6e5-de3d360e6734"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:23 crc kubenswrapper[5003]: I0104 12:08:23.952168 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70fa4b5a-f151-4372-b6e5-de3d360e6734-kube-api-access-mmj7s" (OuterVolumeSpecName: "kube-api-access-mmj7s") pod "70fa4b5a-f151-4372-b6e5-de3d360e6734" (UID: "70fa4b5a-f151-4372-b6e5-de3d360e6734"). InnerVolumeSpecName "kube-api-access-mmj7s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.045579 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70fa4b5a-f151-4372-b6e5-de3d360e6734-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.045620 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmj7s\" (UniqueName: \"kubernetes.io/projected/70fa4b5a-f151-4372-b6e5-de3d360e6734-kube-api-access-mmj7s\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.267268 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-npwcd"] Jan 04 12:08:24 crc kubenswrapper[5003]: E0104 12:08:24.267810 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70fa4b5a-f151-4372-b6e5-de3d360e6734" containerName="mariadb-account-create-update" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.267875 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="70fa4b5a-f151-4372-b6e5-de3d360e6734" containerName="mariadb-account-create-update" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.268195 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="70fa4b5a-f151-4372-b6e5-de3d360e6734" containerName="mariadb-account-create-update" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.269085 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.356470 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl25f\" (UniqueName: \"kubernetes.io/projected/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-kube-api-access-dl25f\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.356581 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.356607 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.356636 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-config\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.356671 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: 
\"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.375829 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-npwcd"] Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.459299 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl25f\" (UniqueName: \"kubernetes.io/projected/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-kube-api-access-dl25f\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.459400 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.459442 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-sb\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.459465 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-config\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.459505 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.460586 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-nb\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.461866 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-config\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.462428 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-dns-svc\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.469214 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-sb\") pod 
\"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.490878 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl25f\" (UniqueName: \"kubernetes.io/projected/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-kube-api-access-dl25f\") pod \"dnsmasq-dns-67fdf7998c-npwcd\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") " pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.566110 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-77cfc" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.566117 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-77cfc" event={"ID":"70fa4b5a-f151-4372-b6e5-de3d360e6734","Type":"ContainerDied","Data":"b3b6af67ca9877198718a2df59d065f1f51cb92fdd12936d42b2ba9560722ed2"} Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.566251 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3b6af67ca9877198718a2df59d065f1f51cb92fdd12936d42b2ba9560722ed2" Jan 04 12:08:24 crc kubenswrapper[5003]: I0104 12:08:24.615516 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.018709 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e45f-account-create-update-llf8z" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.023706 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-55ncj" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.177288 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/346862a9-1434-4cd7-bd85-1bbf1c02431d-operator-scripts\") pod \"346862a9-1434-4cd7-bd85-1bbf1c02431d\" (UID: \"346862a9-1434-4cd7-bd85-1bbf1c02431d\") " Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.178215 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sg5gt\" (UniqueName: \"kubernetes.io/projected/346862a9-1434-4cd7-bd85-1bbf1c02431d-kube-api-access-sg5gt\") pod \"346862a9-1434-4cd7-bd85-1bbf1c02431d\" (UID: \"346862a9-1434-4cd7-bd85-1bbf1c02431d\") " Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.178377 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af87a6b9-8483-47b6-a458-4cde08f820ab-operator-scripts\") pod \"af87a6b9-8483-47b6-a458-4cde08f820ab\" (UID: \"af87a6b9-8483-47b6-a458-4cde08f820ab\") " Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.178424 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/346862a9-1434-4cd7-bd85-1bbf1c02431d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "346862a9-1434-4cd7-bd85-1bbf1c02431d" (UID: "346862a9-1434-4cd7-bd85-1bbf1c02431d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.178676 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46dgn\" (UniqueName: \"kubernetes.io/projected/af87a6b9-8483-47b6-a458-4cde08f820ab-kube-api-access-46dgn\") pod \"af87a6b9-8483-47b6-a458-4cde08f820ab\" (UID: \"af87a6b9-8483-47b6-a458-4cde08f820ab\") " Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.178770 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af87a6b9-8483-47b6-a458-4cde08f820ab-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "af87a6b9-8483-47b6-a458-4cde08f820ab" (UID: "af87a6b9-8483-47b6-a458-4cde08f820ab"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.179375 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af87a6b9-8483-47b6-a458-4cde08f820ab-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.179487 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/346862a9-1434-4cd7-bd85-1bbf1c02431d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.189309 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af87a6b9-8483-47b6-a458-4cde08f820ab-kube-api-access-46dgn" (OuterVolumeSpecName: "kube-api-access-46dgn") pod "af87a6b9-8483-47b6-a458-4cde08f820ab" (UID: "af87a6b9-8483-47b6-a458-4cde08f820ab"). InnerVolumeSpecName "kube-api-access-46dgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.189792 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/346862a9-1434-4cd7-bd85-1bbf1c02431d-kube-api-access-sg5gt" (OuterVolumeSpecName: "kube-api-access-sg5gt") pod "346862a9-1434-4cd7-bd85-1bbf1c02431d" (UID: "346862a9-1434-4cd7-bd85-1bbf1c02431d"). InnerVolumeSpecName "kube-api-access-sg5gt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.199488 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-npwcd"] Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.281828 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46dgn\" (UniqueName: \"kubernetes.io/projected/af87a6b9-8483-47b6-a458-4cde08f820ab-kube-api-access-46dgn\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.281872 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sg5gt\" (UniqueName: \"kubernetes.io/projected/346862a9-1434-4cd7-bd85-1bbf1c02431d-kube-api-access-sg5gt\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.503252 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Jan 04 12:08:25 crc kubenswrapper[5003]: E0104 12:08:25.503663 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="346862a9-1434-4cd7-bd85-1bbf1c02431d" containerName="mariadb-account-create-update" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.503683 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="346862a9-1434-4cd7-bd85-1bbf1c02431d" containerName="mariadb-account-create-update" Jan 04 12:08:25 crc kubenswrapper[5003]: E0104 12:08:25.503709 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af87a6b9-8483-47b6-a458-4cde08f820ab" containerName="mariadb-database-create" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.503715 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="af87a6b9-8483-47b6-a458-4cde08f820ab" containerName="mariadb-database-create" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.503874 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="af87a6b9-8483-47b6-a458-4cde08f820ab" containerName="mariadb-database-create" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.503899 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="346862a9-1434-4cd7-bd85-1bbf1c02431d" containerName="mariadb-account-create-update" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.508375 5003 util.go:30] "No sandbox for pod can be found. 
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.508375 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.509627 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.510274 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.511492 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-bv6rw"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.511563 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.511723 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.575397 5003 generic.go:334] "Generic (PLEG): container finished" podID="33a31de1-636d-416a-a0a3-1f5cb2de8ce1" containerID="c8b19130b9f9dcd1a1e6ba1282dff9ff15372e0d198b03af0cf08830634c7998" exitCode=0
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.575489 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" event={"ID":"33a31de1-636d-416a-a0a3-1f5cb2de8ce1","Type":"ContainerDied","Data":"c8b19130b9f9dcd1a1e6ba1282dff9ff15372e0d198b03af0cf08830634c7998"}
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.575528 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" event={"ID":"33a31de1-636d-416a-a0a3-1f5cb2de8ce1","Type":"ContainerStarted","Data":"6dfe568c31772bd3f0a29cae3451cab2c92d078303f46605e9dcb8295f888360"}
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.578086 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-55ncj" event={"ID":"af87a6b9-8483-47b6-a458-4cde08f820ab","Type":"ContainerDied","Data":"56e25974a35cdd56036c30565c310c7213d099694632300c26e2e6d4edc48246"}
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.578130 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="56e25974a35cdd56036c30565c310c7213d099694632300c26e2e6d4edc48246"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.578184 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-55ncj"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.580694 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e45f-account-create-update-llf8z" event={"ID":"346862a9-1434-4cd7-bd85-1bbf1c02431d","Type":"ContainerDied","Data":"cc06e3a445c1e3e1437655661fdacbc23642d92cc227948d2f7eda49344438e6"}
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.580728 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc06e3a445c1e3e1437655661fdacbc23642d92cc227948d2f7eda49344438e6"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.580781 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e45f-account-create-update-llf8z"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.694572 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.694735 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-lock\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.694768 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdjv8\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-kube-api-access-vdjv8\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.694816 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.694931 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-cache\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.796521 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-cache\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.796908 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.797038 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-lock\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0"
Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.797080 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdjv8\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-kube-api-access-vdjv8\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0"
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.797936 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/swift-storage-0" Jan 04 12:08:25 crc kubenswrapper[5003]: E0104 12:08:25.798356 5003 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:08:25 crc kubenswrapper[5003]: E0104 12:08:25.798403 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 04 12:08:25 crc kubenswrapper[5003]: E0104 12:08:25.798467 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift podName:e387635d-9ef2-4b1d-9303-0d762e8b282c nodeName:}" failed. No retries permitted until 2026-01-04 12:08:26.298443743 +0000 UTC m=+1221.771473584 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift") pod "swift-storage-0" (UID: "e387635d-9ef2-4b1d-9303-0d762e8b282c") : configmap "swift-ring-files" not found Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.798994 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-cache\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.799320 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-lock\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.822304 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdjv8\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-kube-api-access-vdjv8\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0" Jan 04 12:08:25 crc kubenswrapper[5003]: I0104 12:08:25.828126 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.102078 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-tb8rm"] Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.103316 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.105550 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.105730 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.112794 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.120540 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-tb8rm"] Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.156522 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-g4p7b"] Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.157825 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.166356 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-tb8rm"] Jan 04 12:08:26 crc kubenswrapper[5003]: E0104 12:08:26.166974 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-m67b6 ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-m67b6 ring-data-devices scripts swiftconf]: context canceled" pod="openstack/swift-ring-rebalance-tb8rm" podUID="9adae396-4260-4e6b-96ad-1e582e112609" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.197279 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-g4p7b"] Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.205257 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-swiftconf\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.205328 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-scripts\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.205410 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-dispersionconf\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.205435 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcmr2\" (UniqueName: \"kubernetes.io/projected/37399d86-126f-4327-94c9-f41df343ab62-kube-api-access-vcmr2\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 
12:08:26.205455 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-scripts\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.205477 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/9adae396-4260-4e6b-96ad-1e582e112609-etc-swift\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.205499 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-combined-ca-bundle\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.206007 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-ring-data-devices\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.206076 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m67b6\" (UniqueName: \"kubernetes.io/projected/9adae396-4260-4e6b-96ad-1e582e112609-kube-api-access-m67b6\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.206149 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-dispersionconf\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.206289 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/37399d86-126f-4327-94c9-f41df343ab62-etc-swift\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.206321 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-swiftconf\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.206375 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-ring-data-devices\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " 
pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.206396 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-combined-ca-bundle\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307216 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/37399d86-126f-4327-94c9-f41df343ab62-etc-swift\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307262 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-swiftconf\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307283 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-ring-data-devices\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307300 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-combined-ca-bundle\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307332 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-swiftconf\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307348 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-scripts\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307387 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-dispersionconf\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307407 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcmr2\" (UniqueName: \"kubernetes.io/projected/37399d86-126f-4327-94c9-f41df343ab62-kube-api-access-vcmr2\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 
12:08:26.307422 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-scripts\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307437 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/9adae396-4260-4e6b-96ad-1e582e112609-etc-swift\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307452 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-combined-ca-bundle\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307478 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307512 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-ring-data-devices\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307528 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m67b6\" (UniqueName: \"kubernetes.io/projected/9adae396-4260-4e6b-96ad-1e582e112609-kube-api-access-m67b6\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.307548 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-dispersionconf\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.308709 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/37399d86-126f-4327-94c9-f41df343ab62-etc-swift\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.309224 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-scripts\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.309400 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/empty-dir/9adae396-4260-4e6b-96ad-1e582e112609-etc-swift\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: E0104 12:08:26.309448 5003 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:08:26 crc kubenswrapper[5003]: E0104 12:08:26.309474 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.309456 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-scripts\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: E0104 12:08:26.309528 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift podName:e387635d-9ef2-4b1d-9303-0d762e8b282c nodeName:}" failed. No retries permitted until 2026-01-04 12:08:27.309507449 +0000 UTC m=+1222.782537370 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift") pod "swift-storage-0" (UID: "e387635d-9ef2-4b1d-9303-0d762e8b282c") : configmap "swift-ring-files" not found Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.309800 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-ring-data-devices\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.310406 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-ring-data-devices\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.311770 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-dispersionconf\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.311904 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-dispersionconf\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.313034 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-combined-ca-bundle\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.315472 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-swiftconf\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.315970 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-combined-ca-bundle\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.325906 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-swiftconf\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.326121 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m67b6\" (UniqueName: \"kubernetes.io/projected/9adae396-4260-4e6b-96ad-1e582e112609-kube-api-access-m67b6\") pod \"swift-ring-rebalance-tb8rm\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.327878 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcmr2\" (UniqueName: \"kubernetes.io/projected/37399d86-126f-4327-94c9-f41df343ab62-kube-api-access-vcmr2\") pod \"swift-ring-rebalance-g4p7b\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.480610 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.596202 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.596203 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" event={"ID":"33a31de1-636d-416a-a0a3-1f5cb2de8ce1","Type":"ContainerStarted","Data":"063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8"} Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.596980 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.615444 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.632828 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" podStartSLOduration=2.632801719 podStartE2EDuration="2.632801719s" podCreationTimestamp="2026-01-04 12:08:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:26.622901342 +0000 UTC m=+1222.095931203" watchObservedRunningTime="2026-01-04 12:08:26.632801719 +0000 UTC m=+1222.105831560" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.715986 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-scripts\") pod \"9adae396-4260-4e6b-96ad-1e582e112609\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.716048 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-swiftconf\") pod \"9adae396-4260-4e6b-96ad-1e582e112609\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.716148 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-dispersionconf\") pod \"9adae396-4260-4e6b-96ad-1e582e112609\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.716240 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-ring-data-devices\") pod \"9adae396-4260-4e6b-96ad-1e582e112609\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.716273 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-combined-ca-bundle\") pod \"9adae396-4260-4e6b-96ad-1e582e112609\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.716298 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/9adae396-4260-4e6b-96ad-1e582e112609-etc-swift\") pod \"9adae396-4260-4e6b-96ad-1e582e112609\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.716332 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m67b6\" (UniqueName: \"kubernetes.io/projected/9adae396-4260-4e6b-96ad-1e582e112609-kube-api-access-m67b6\") pod \"9adae396-4260-4e6b-96ad-1e582e112609\" (UID: \"9adae396-4260-4e6b-96ad-1e582e112609\") " Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.718312 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-scripts" (OuterVolumeSpecName: "scripts") pod "9adae396-4260-4e6b-96ad-1e582e112609" (UID: "9adae396-4260-4e6b-96ad-1e582e112609"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.720158 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "9adae396-4260-4e6b-96ad-1e582e112609" (UID: "9adae396-4260-4e6b-96ad-1e582e112609"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.721877 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9adae396-4260-4e6b-96ad-1e582e112609-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "9adae396-4260-4e6b-96ad-1e582e112609" (UID: "9adae396-4260-4e6b-96ad-1e582e112609"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.724663 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9adae396-4260-4e6b-96ad-1e582e112609-kube-api-access-m67b6" (OuterVolumeSpecName: "kube-api-access-m67b6") pod "9adae396-4260-4e6b-96ad-1e582e112609" (UID: "9adae396-4260-4e6b-96ad-1e582e112609"). InnerVolumeSpecName "kube-api-access-m67b6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.725892 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "9adae396-4260-4e6b-96ad-1e582e112609" (UID: "9adae396-4260-4e6b-96ad-1e582e112609"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.734277 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9adae396-4260-4e6b-96ad-1e582e112609" (UID: "9adae396-4260-4e6b-96ad-1e582e112609"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.737921 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "9adae396-4260-4e6b-96ad-1e582e112609" (UID: "9adae396-4260-4e6b-96ad-1e582e112609"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.819236 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.819268 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.819279 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/9adae396-4260-4e6b-96ad-1e582e112609-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.819288 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m67b6\" (UniqueName: \"kubernetes.io/projected/9adae396-4260-4e6b-96ad-1e582e112609-kube-api-access-m67b6\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.819300 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9adae396-4260-4e6b-96ad-1e582e112609-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.819327 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.819337 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/9adae396-4260-4e6b-96ad-1e582e112609-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:26 crc kubenswrapper[5003]: I0104 12:08:26.975541 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-g4p7b"] Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.336609 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0" Jan 04 12:08:27 crc kubenswrapper[5003]: E0104 12:08:27.336802 5003 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:08:27 crc kubenswrapper[5003]: E0104 12:08:27.336821 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 04 12:08:27 crc kubenswrapper[5003]: E0104 12:08:27.336880 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift podName:e387635d-9ef2-4b1d-9303-0d762e8b282c nodeName:}" failed. No retries permitted until 2026-01-04 12:08:29.336860685 +0000 UTC m=+1224.809890526 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift") pod "swift-storage-0" (UID: "e387635d-9ef2-4b1d-9303-0d762e8b282c") : configmap "swift-ring-files" not found Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.616975 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-g4p7b" event={"ID":"37399d86-126f-4327-94c9-f41df343ab62","Type":"ContainerStarted","Data":"30acd2dffc7df832200ca0701532929d9e009cc2aad317da3b0a3e6905329066"} Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.617124 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-tb8rm" Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.702693 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-tb8rm"] Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.710124 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-tb8rm"] Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.827436 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-lph7c"] Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.828575 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lph7c" Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.837648 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-70b4-account-create-update-jmrxl"] Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.838779 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-70b4-account-create-update-jmrxl" Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.841856 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.846792 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5838830-ab2d-4d0d-ab22-7448352db030-operator-scripts\") pod \"glance-db-create-lph7c\" (UID: \"e5838830-ab2d-4d0d-ab22-7448352db030\") " pod="openstack/glance-db-create-lph7c" Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.846875 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ad51468-82d2-4b49-b778-f2d296eafbf1-operator-scripts\") pod \"glance-70b4-account-create-update-jmrxl\" (UID: \"4ad51468-82d2-4b49-b778-f2d296eafbf1\") " pod="openstack/glance-70b4-account-create-update-jmrxl" Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.846946 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plv8z\" (UniqueName: \"kubernetes.io/projected/4ad51468-82d2-4b49-b778-f2d296eafbf1-kube-api-access-plv8z\") pod \"glance-70b4-account-create-update-jmrxl\" (UID: \"4ad51468-82d2-4b49-b778-f2d296eafbf1\") " pod="openstack/glance-70b4-account-create-update-jmrxl" Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.847105 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5gh9\" (UniqueName: \"kubernetes.io/projected/e5838830-ab2d-4d0d-ab22-7448352db030-kube-api-access-h5gh9\") pod \"glance-db-create-lph7c\" (UID: 
\"e5838830-ab2d-4d0d-ab22-7448352db030\") " pod="openstack/glance-db-create-lph7c" Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.849396 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-lph7c"] Jan 04 12:08:27 crc kubenswrapper[5003]: I0104 12:08:27.874364 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-70b4-account-create-update-jmrxl"] Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.009876 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5gh9\" (UniqueName: \"kubernetes.io/projected/e5838830-ab2d-4d0d-ab22-7448352db030-kube-api-access-h5gh9\") pod \"glance-db-create-lph7c\" (UID: \"e5838830-ab2d-4d0d-ab22-7448352db030\") " pod="openstack/glance-db-create-lph7c" Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.009964 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5838830-ab2d-4d0d-ab22-7448352db030-operator-scripts\") pod \"glance-db-create-lph7c\" (UID: \"e5838830-ab2d-4d0d-ab22-7448352db030\") " pod="openstack/glance-db-create-lph7c" Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.010005 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ad51468-82d2-4b49-b778-f2d296eafbf1-operator-scripts\") pod \"glance-70b4-account-create-update-jmrxl\" (UID: \"4ad51468-82d2-4b49-b778-f2d296eafbf1\") " pod="openstack/glance-70b4-account-create-update-jmrxl" Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.010069 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plv8z\" (UniqueName: \"kubernetes.io/projected/4ad51468-82d2-4b49-b778-f2d296eafbf1-kube-api-access-plv8z\") pod \"glance-70b4-account-create-update-jmrxl\" (UID: \"4ad51468-82d2-4b49-b778-f2d296eafbf1\") " pod="openstack/glance-70b4-account-create-update-jmrxl" Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.010687 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5838830-ab2d-4d0d-ab22-7448352db030-operator-scripts\") pod \"glance-db-create-lph7c\" (UID: \"e5838830-ab2d-4d0d-ab22-7448352db030\") " pod="openstack/glance-db-create-lph7c" Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.010844 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ad51468-82d2-4b49-b778-f2d296eafbf1-operator-scripts\") pod \"glance-70b4-account-create-update-jmrxl\" (UID: \"4ad51468-82d2-4b49-b778-f2d296eafbf1\") " pod="openstack/glance-70b4-account-create-update-jmrxl" Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.037630 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plv8z\" (UniqueName: \"kubernetes.io/projected/4ad51468-82d2-4b49-b778-f2d296eafbf1-kube-api-access-plv8z\") pod \"glance-70b4-account-create-update-jmrxl\" (UID: \"4ad51468-82d2-4b49-b778-f2d296eafbf1\") " pod="openstack/glance-70b4-account-create-update-jmrxl" Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.039144 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5gh9\" (UniqueName: \"kubernetes.io/projected/e5838830-ab2d-4d0d-ab22-7448352db030-kube-api-access-h5gh9\") pod \"glance-db-create-lph7c\" (UID: 
\"e5838830-ab2d-4d0d-ab22-7448352db030\") " pod="openstack/glance-db-create-lph7c" Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.149249 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lph7c" Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.167269 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-70b4-account-create-update-jmrxl" Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.620745 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-70b4-account-create-update-jmrxl"] Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.698333 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-lph7c"] Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.823573 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9adae396-4260-4e6b-96ad-1e582e112609" path="/var/lib/kubelet/pods/9adae396-4260-4e6b-96ad-1e582e112609/volumes" Jan 04 12:08:28 crc kubenswrapper[5003]: I0104 12:08:28.995526 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-77cfc"] Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.014526 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-77cfc"] Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.102029 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-trt8p"] Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.103295 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-trt8p" Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.108480 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-trt8p"] Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.135242 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.135816 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqkqq\" (UniqueName: \"kubernetes.io/projected/da444afc-236b-465d-a105-f6c3b25f677c-kube-api-access-gqkqq\") pod \"root-account-create-update-trt8p\" (UID: \"da444afc-236b-465d-a105-f6c3b25f677c\") " pod="openstack/root-account-create-update-trt8p" Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.135920 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da444afc-236b-465d-a105-f6c3b25f677c-operator-scripts\") pod \"root-account-create-update-trt8p\" (UID: \"da444afc-236b-465d-a105-f6c3b25f677c\") " pod="openstack/root-account-create-update-trt8p" Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.237749 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da444afc-236b-465d-a105-f6c3b25f677c-operator-scripts\") pod \"root-account-create-update-trt8p\" (UID: \"da444afc-236b-465d-a105-f6c3b25f677c\") " pod="openstack/root-account-create-update-trt8p" Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.238075 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqkqq\" (UniqueName: 
\"kubernetes.io/projected/da444afc-236b-465d-a105-f6c3b25f677c-kube-api-access-gqkqq\") pod \"root-account-create-update-trt8p\" (UID: \"da444afc-236b-465d-a105-f6c3b25f677c\") " pod="openstack/root-account-create-update-trt8p" Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.238843 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da444afc-236b-465d-a105-f6c3b25f677c-operator-scripts\") pod \"root-account-create-update-trt8p\" (UID: \"da444afc-236b-465d-a105-f6c3b25f677c\") " pod="openstack/root-account-create-update-trt8p" Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.277372 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqkqq\" (UniqueName: \"kubernetes.io/projected/da444afc-236b-465d-a105-f6c3b25f677c-kube-api-access-gqkqq\") pod \"root-account-create-update-trt8p\" (UID: \"da444afc-236b-465d-a105-f6c3b25f677c\") " pod="openstack/root-account-create-update-trt8p" Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.339549 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0" Jan 04 12:08:29 crc kubenswrapper[5003]: E0104 12:08:29.339830 5003 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:08:29 crc kubenswrapper[5003]: E0104 12:08:29.339873 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 04 12:08:29 crc kubenswrapper[5003]: E0104 12:08:29.339942 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift podName:e387635d-9ef2-4b1d-9303-0d762e8b282c nodeName:}" failed. No retries permitted until 2026-01-04 12:08:33.339920504 +0000 UTC m=+1228.812950345 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift") pod "swift-storage-0" (UID: "e387635d-9ef2-4b1d-9303-0d762e8b282c") : configmap "swift-ring-files" not found Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.455035 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-trt8p" Jan 04 12:08:29 crc kubenswrapper[5003]: W0104 12:08:29.580239 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5838830_ab2d_4d0d_ab22_7448352db030.slice/crio-3bce6e4a2c1e7a5b92de173862eb2c36b82f4d7a2aec5ab31a032db752ab7eb8 WatchSource:0}: Error finding container 3bce6e4a2c1e7a5b92de173862eb2c36b82f4d7a2aec5ab31a032db752ab7eb8: Status 404 returned error can't find the container with id 3bce6e4a2c1e7a5b92de173862eb2c36b82f4d7a2aec5ab31a032db752ab7eb8 Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.634848 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-70b4-account-create-update-jmrxl" event={"ID":"4ad51468-82d2-4b49-b778-f2d296eafbf1","Type":"ContainerStarted","Data":"491cc6a58aec592f9ecc1a8edf7594d79aef695a695ce5643eec2825135576dc"} Jan 04 12:08:29 crc kubenswrapper[5003]: I0104 12:08:29.636394 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lph7c" event={"ID":"e5838830-ab2d-4d0d-ab22-7448352db030","Type":"ContainerStarted","Data":"3bce6e4a2c1e7a5b92de173862eb2c36b82f4d7a2aec5ab31a032db752ab7eb8"} Jan 04 12:08:30 crc kubenswrapper[5003]: I0104 12:08:30.815493 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70fa4b5a-f151-4372-b6e5-de3d360e6734" path="/var/lib/kubelet/pods/70fa4b5a-f151-4372-b6e5-de3d360e6734/volumes" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.117418 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-trt8p"] Jan 04 12:08:31 crc kubenswrapper[5003]: W0104 12:08:31.249723 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda444afc_236b_465d_a105_f6c3b25f677c.slice/crio-14fedee265e9f1301b4256b06d046a0de52d138d124a0b3cddfb4b33fdb4b3a3 WatchSource:0}: Error finding container 14fedee265e9f1301b4256b06d046a0de52d138d124a0b3cddfb4b33fdb4b3a3: Status 404 returned error can't find the container with id 14fedee265e9f1301b4256b06d046a0de52d138d124a0b3cddfb4b33fdb4b3a3 Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.659396 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-g4p7b" event={"ID":"37399d86-126f-4327-94c9-f41df343ab62","Type":"ContainerStarted","Data":"64406d146472e06022b356b2502a9a5cf00a7d3f689122719d04f157d1ad2893"} Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.662994 5003 generic.go:334] "Generic (PLEG): container finished" podID="4ad51468-82d2-4b49-b778-f2d296eafbf1" containerID="09f7d9229fc706dc8bc36a4fe5df19bdd166223a864e15c95d9d84e6a9cbf92a" exitCode=0 Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.663091 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-70b4-account-create-update-jmrxl" event={"ID":"4ad51468-82d2-4b49-b778-f2d296eafbf1","Type":"ContainerDied","Data":"09f7d9229fc706dc8bc36a4fe5df19bdd166223a864e15c95d9d84e6a9cbf92a"} Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.665322 5003 generic.go:334] "Generic (PLEG): container finished" podID="da444afc-236b-465d-a105-f6c3b25f677c" containerID="afa5dc27ad8f20e3388351b3b757361a6afaac2252ca35c4ed585e28ce8564d3" exitCode=0 Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.665370 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-trt8p" 
event={"ID":"da444afc-236b-465d-a105-f6c3b25f677c","Type":"ContainerDied","Data":"afa5dc27ad8f20e3388351b3b757361a6afaac2252ca35c4ed585e28ce8564d3"} Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.665388 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-trt8p" event={"ID":"da444afc-236b-465d-a105-f6c3b25f677c","Type":"ContainerStarted","Data":"14fedee265e9f1301b4256b06d046a0de52d138d124a0b3cddfb4b33fdb4b3a3"} Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.667897 5003 generic.go:334] "Generic (PLEG): container finished" podID="e5838830-ab2d-4d0d-ab22-7448352db030" containerID="9f2a1056d6dc7247e01e260f80883de2a110e9f1decf54f7559a543884cd14d8" exitCode=0 Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.667928 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lph7c" event={"ID":"e5838830-ab2d-4d0d-ab22-7448352db030","Type":"ContainerDied","Data":"9f2a1056d6dc7247e01e260f80883de2a110e9f1decf54f7559a543884cd14d8"} Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.670813 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-xfd9f"] Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.672434 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-xfd9f" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.683513 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-xfd9f"] Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.694567 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-operator-scripts\") pod \"keystone-db-create-xfd9f\" (UID: \"50703d14-b53d-4ef3-8b2a-790b55e0c5d1\") " pod="openstack/keystone-db-create-xfd9f" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.694909 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fcft\" (UniqueName: \"kubernetes.io/projected/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-kube-api-access-6fcft\") pod \"keystone-db-create-xfd9f\" (UID: \"50703d14-b53d-4ef3-8b2a-790b55e0c5d1\") " pod="openstack/keystone-db-create-xfd9f" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.700371 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-g4p7b" podStartSLOduration=1.9658830489999999 podStartE2EDuration="5.70035095s" podCreationTimestamp="2026-01-04 12:08:26 +0000 UTC" firstStartedPulling="2026-01-04 12:08:26.98223499 +0000 UTC m=+1222.455264831" lastFinishedPulling="2026-01-04 12:08:30.716702881 +0000 UTC m=+1226.189732732" observedRunningTime="2026-01-04 12:08:31.688307747 +0000 UTC m=+1227.161337618" watchObservedRunningTime="2026-01-04 12:08:31.70035095 +0000 UTC m=+1227.173380811" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.796776 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fcft\" (UniqueName: \"kubernetes.io/projected/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-kube-api-access-6fcft\") pod \"keystone-db-create-xfd9f\" (UID: \"50703d14-b53d-4ef3-8b2a-790b55e0c5d1\") " pod="openstack/keystone-db-create-xfd9f" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.796883 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-operator-scripts\") pod \"keystone-db-create-xfd9f\" (UID: \"50703d14-b53d-4ef3-8b2a-790b55e0c5d1\") " pod="openstack/keystone-db-create-xfd9f" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.797991 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-operator-scripts\") pod \"keystone-db-create-xfd9f\" (UID: \"50703d14-b53d-4ef3-8b2a-790b55e0c5d1\") " pod="openstack/keystone-db-create-xfd9f" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.817347 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fcft\" (UniqueName: \"kubernetes.io/projected/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-kube-api-access-6fcft\") pod \"keystone-db-create-xfd9f\" (UID: \"50703d14-b53d-4ef3-8b2a-790b55e0c5d1\") " pod="openstack/keystone-db-create-xfd9f" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.874182 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-788e-account-create-update-bg6xj"] Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.878373 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-788e-account-create-update-bg6xj" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.885072 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.887677 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-788e-account-create-update-bg6xj"] Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.898606 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glbs5\" (UniqueName: \"kubernetes.io/projected/3121d3df-082a-434a-8bad-4dce3e7d9b09-kube-api-access-glbs5\") pod \"keystone-788e-account-create-update-bg6xj\" (UID: \"3121d3df-082a-434a-8bad-4dce3e7d9b09\") " pod="openstack/keystone-788e-account-create-update-bg6xj" Jan 04 12:08:31 crc kubenswrapper[5003]: I0104 12:08:31.898700 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3121d3df-082a-434a-8bad-4dce3e7d9b09-operator-scripts\") pod \"keystone-788e-account-create-update-bg6xj\" (UID: \"3121d3df-082a-434a-8bad-4dce3e7d9b09\") " pod="openstack/keystone-788e-account-create-update-bg6xj" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.000420 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glbs5\" (UniqueName: \"kubernetes.io/projected/3121d3df-082a-434a-8bad-4dce3e7d9b09-kube-api-access-glbs5\") pod \"keystone-788e-account-create-update-bg6xj\" (UID: \"3121d3df-082a-434a-8bad-4dce3e7d9b09\") " pod="openstack/keystone-788e-account-create-update-bg6xj" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.000499 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3121d3df-082a-434a-8bad-4dce3e7d9b09-operator-scripts\") pod \"keystone-788e-account-create-update-bg6xj\" (UID: \"3121d3df-082a-434a-8bad-4dce3e7d9b09\") " pod="openstack/keystone-788e-account-create-update-bg6xj" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.001763 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3121d3df-082a-434a-8bad-4dce3e7d9b09-operator-scripts\") pod \"keystone-788e-account-create-update-bg6xj\" (UID: \"3121d3df-082a-434a-8bad-4dce3e7d9b09\") " pod="openstack/keystone-788e-account-create-update-bg6xj" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.038761 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-xfd9f" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.039615 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glbs5\" (UniqueName: \"kubernetes.io/projected/3121d3df-082a-434a-8bad-4dce3e7d9b09-kube-api-access-glbs5\") pod \"keystone-788e-account-create-update-bg6xj\" (UID: \"3121d3df-082a-434a-8bad-4dce3e7d9b09\") " pod="openstack/keystone-788e-account-create-update-bg6xj" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.195430 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-788e-account-create-update-bg6xj" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.310870 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-xfd9f"] Jan 04 12:08:32 crc kubenswrapper[5003]: W0104 12:08:32.325752 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50703d14_b53d_4ef3_8b2a_790b55e0c5d1.slice/crio-a604d9b8548684a88175e71265eb441c7e554b032001978e93d0715838614892 WatchSource:0}: Error finding container a604d9b8548684a88175e71265eb441c7e554b032001978e93d0715838614892: Status 404 returned error can't find the container with id a604d9b8548684a88175e71265eb441c7e554b032001978e93d0715838614892 Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.481336 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-fdswd" podUID="c88e1443-25c4-4e67-83d0-e43cef2b2e5c" containerName="ovn-controller" probeResult="failure" output=< Jan 04 12:08:32 crc kubenswrapper[5003]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Jan 04 12:08:32 crc kubenswrapper[5003]: > Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.527654 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.576637 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.680268 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-xfd9f" event={"ID":"50703d14-b53d-4ef3-8b2a-790b55e0c5d1","Type":"ContainerStarted","Data":"5bb101871bdcd0c3e3c7a985d0c07f6658b8187319508b5024f9eedf1dfb8867"} Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.680693 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-xfd9f" event={"ID":"50703d14-b53d-4ef3-8b2a-790b55e0c5d1","Type":"ContainerStarted","Data":"a604d9b8548684a88175e71265eb441c7e554b032001978e93d0715838614892"} Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.708804 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-xfd9f" podStartSLOduration=1.708783964 podStartE2EDuration="1.708783964s" podCreationTimestamp="2026-01-04 12:08:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:08:32.705828597 +0000 UTC m=+1228.178858438" watchObservedRunningTime="2026-01-04 12:08:32.708783964 +0000 UTC m=+1228.181813805" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.740639 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-788e-account-create-update-bg6xj"] Jan 04 12:08:32 crc kubenswrapper[5003]: W0104 12:08:32.767165 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3121d3df_082a_434a_8bad_4dce3e7d9b09.slice/crio-33d3d9e5189d1d75685ba7fb131b465cba1a0f92b05c6d2537945bf8d847956d WatchSource:0}: Error finding container 33d3d9e5189d1d75685ba7fb131b465cba1a0f92b05c6d2537945bf8d847956d: Status 404 returned error can't find the container with id 33d3d9e5189d1d75685ba7fb131b465cba1a0f92b05c6d2537945bf8d847956d Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.892521 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-fdswd-config-8d7cr"] Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.893830 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.914984 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.921419 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run-ovn\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.921495 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-additional-scripts\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.921519 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.921536 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-log-ovn\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.921637 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zswt4\" (UniqueName: \"kubernetes.io/projected/c6917c5b-df80-431c-af8f-e126bfac96b2-kube-api-access-zswt4\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 
04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.921719 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-scripts\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:32 crc kubenswrapper[5003]: I0104 12:08:32.928354 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fdswd-config-8d7cr"] Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.024059 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-additional-scripts\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.027711 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.027800 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-log-ovn\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.028109 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zswt4\" (UniqueName: \"kubernetes.io/projected/c6917c5b-df80-431c-af8f-e126bfac96b2-kube-api-access-zswt4\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.028546 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-scripts\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.028704 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run-ovn\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.029360 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run-ovn\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.025547 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-additional-scripts\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.029610 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.029727 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-log-ovn\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.032750 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-scripts\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.061083 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zswt4\" (UniqueName: \"kubernetes.io/projected/c6917c5b-df80-431c-af8f-e126bfac96b2-kube-api-access-zswt4\") pod \"ovn-controller-fdswd-config-8d7cr\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.142520 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-70b4-account-create-update-jmrxl" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.149895 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-trt8p" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.168149 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-lph7c" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.232972 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5838830-ab2d-4d0d-ab22-7448352db030-operator-scripts\") pod \"e5838830-ab2d-4d0d-ab22-7448352db030\" (UID: \"e5838830-ab2d-4d0d-ab22-7448352db030\") " Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.233081 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqkqq\" (UniqueName: \"kubernetes.io/projected/da444afc-236b-465d-a105-f6c3b25f677c-kube-api-access-gqkqq\") pod \"da444afc-236b-465d-a105-f6c3b25f677c\" (UID: \"da444afc-236b-465d-a105-f6c3b25f677c\") " Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.233167 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plv8z\" (UniqueName: \"kubernetes.io/projected/4ad51468-82d2-4b49-b778-f2d296eafbf1-kube-api-access-plv8z\") pod \"4ad51468-82d2-4b49-b778-f2d296eafbf1\" (UID: \"4ad51468-82d2-4b49-b778-f2d296eafbf1\") " Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.233196 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5gh9\" (UniqueName: \"kubernetes.io/projected/e5838830-ab2d-4d0d-ab22-7448352db030-kube-api-access-h5gh9\") pod \"e5838830-ab2d-4d0d-ab22-7448352db030\" (UID: \"e5838830-ab2d-4d0d-ab22-7448352db030\") " Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.233242 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da444afc-236b-465d-a105-f6c3b25f677c-operator-scripts\") pod \"da444afc-236b-465d-a105-f6c3b25f677c\" (UID: \"da444afc-236b-465d-a105-f6c3b25f677c\") " Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.233296 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ad51468-82d2-4b49-b778-f2d296eafbf1-operator-scripts\") pod \"4ad51468-82d2-4b49-b778-f2d296eafbf1\" (UID: \"4ad51468-82d2-4b49-b778-f2d296eafbf1\") " Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.234113 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5838830-ab2d-4d0d-ab22-7448352db030-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e5838830-ab2d-4d0d-ab22-7448352db030" (UID: "e5838830-ab2d-4d0d-ab22-7448352db030"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.234136 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ad51468-82d2-4b49-b778-f2d296eafbf1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4ad51468-82d2-4b49-b778-f2d296eafbf1" (UID: "4ad51468-82d2-4b49-b778-f2d296eafbf1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.234215 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da444afc-236b-465d-a105-f6c3b25f677c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "da444afc-236b-465d-a105-f6c3b25f677c" (UID: "da444afc-236b-465d-a105-f6c3b25f677c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.238522 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da444afc-236b-465d-a105-f6c3b25f677c-kube-api-access-gqkqq" (OuterVolumeSpecName: "kube-api-access-gqkqq") pod "da444afc-236b-465d-a105-f6c3b25f677c" (UID: "da444afc-236b-465d-a105-f6c3b25f677c"). InnerVolumeSpecName "kube-api-access-gqkqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.238597 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5838830-ab2d-4d0d-ab22-7448352db030-kube-api-access-h5gh9" (OuterVolumeSpecName: "kube-api-access-h5gh9") pod "e5838830-ab2d-4d0d-ab22-7448352db030" (UID: "e5838830-ab2d-4d0d-ab22-7448352db030"). InnerVolumeSpecName "kube-api-access-h5gh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.239194 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ad51468-82d2-4b49-b778-f2d296eafbf1-kube-api-access-plv8z" (OuterVolumeSpecName: "kube-api-access-plv8z") pod "4ad51468-82d2-4b49-b778-f2d296eafbf1" (UID: "4ad51468-82d2-4b49-b778-f2d296eafbf1"). InnerVolumeSpecName "kube-api-access-plv8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.330731 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.339362 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da444afc-236b-465d-a105-f6c3b25f677c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.339410 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ad51468-82d2-4b49-b778-f2d296eafbf1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.339424 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5838830-ab2d-4d0d-ab22-7448352db030-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.339442 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqkqq\" (UniqueName: \"kubernetes.io/projected/da444afc-236b-465d-a105-f6c3b25f677c-kube-api-access-gqkqq\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.339455 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plv8z\" (UniqueName: \"kubernetes.io/projected/4ad51468-82d2-4b49-b778-f2d296eafbf1-kube-api-access-plv8z\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.339469 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5gh9\" (UniqueName: \"kubernetes.io/projected/e5838830-ab2d-4d0d-ab22-7448352db030-kube-api-access-h5gh9\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.441084 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift\") pod 
\"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0" Jan 04 12:08:33 crc kubenswrapper[5003]: E0104 12:08:33.441168 5003 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Jan 04 12:08:33 crc kubenswrapper[5003]: E0104 12:08:33.441197 5003 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Jan 04 12:08:33 crc kubenswrapper[5003]: E0104 12:08:33.441246 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift podName:e387635d-9ef2-4b1d-9303-0d762e8b282c nodeName:}" failed. No retries permitted until 2026-01-04 12:08:41.441231228 +0000 UTC m=+1236.914261069 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift") pod "swift-storage-0" (UID: "e387635d-9ef2-4b1d-9303-0d762e8b282c") : configmap "swift-ring-files" not found Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.692786 5003 generic.go:334] "Generic (PLEG): container finished" podID="3121d3df-082a-434a-8bad-4dce3e7d9b09" containerID="566da8ce745337c540dde9867ce8455893f00050b6234d3463ef536df3a979bc" exitCode=0 Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.692972 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-788e-account-create-update-bg6xj" event={"ID":"3121d3df-082a-434a-8bad-4dce3e7d9b09","Type":"ContainerDied","Data":"566da8ce745337c540dde9867ce8455893f00050b6234d3463ef536df3a979bc"} Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.693260 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-788e-account-create-update-bg6xj" event={"ID":"3121d3df-082a-434a-8bad-4dce3e7d9b09","Type":"ContainerStarted","Data":"33d3d9e5189d1d75685ba7fb131b465cba1a0f92b05c6d2537945bf8d847956d"} Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.695715 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-70b4-account-create-update-jmrxl" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.695736 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-70b4-account-create-update-jmrxl" event={"ID":"4ad51468-82d2-4b49-b778-f2d296eafbf1","Type":"ContainerDied","Data":"491cc6a58aec592f9ecc1a8edf7594d79aef695a695ce5643eec2825135576dc"} Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.695771 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="491cc6a58aec592f9ecc1a8edf7594d79aef695a695ce5643eec2825135576dc" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.709934 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-trt8p" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.709930 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-trt8p" event={"ID":"da444afc-236b-465d-a105-f6c3b25f677c","Type":"ContainerDied","Data":"14fedee265e9f1301b4256b06d046a0de52d138d124a0b3cddfb4b33fdb4b3a3"} Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.710211 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14fedee265e9f1301b4256b06d046a0de52d138d124a0b3cddfb4b33fdb4b3a3" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.714354 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-lph7c" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.714679 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-lph7c" event={"ID":"e5838830-ab2d-4d0d-ab22-7448352db030","Type":"ContainerDied","Data":"3bce6e4a2c1e7a5b92de173862eb2c36b82f4d7a2aec5ab31a032db752ab7eb8"} Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.714731 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bce6e4a2c1e7a5b92de173862eb2c36b82f4d7a2aec5ab31a032db752ab7eb8" Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.716869 5003 generic.go:334] "Generic (PLEG): container finished" podID="50703d14-b53d-4ef3-8b2a-790b55e0c5d1" containerID="5bb101871bdcd0c3e3c7a985d0c07f6658b8187319508b5024f9eedf1dfb8867" exitCode=0 Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.717036 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-xfd9f" event={"ID":"50703d14-b53d-4ef3-8b2a-790b55e0c5d1","Type":"ContainerDied","Data":"5bb101871bdcd0c3e3c7a985d0c07f6658b8187319508b5024f9eedf1dfb8867"} Jan 04 12:08:33 crc kubenswrapper[5003]: I0104 12:08:33.876708 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-fdswd-config-8d7cr"] Jan 04 12:08:34 crc kubenswrapper[5003]: I0104 12:08:34.617309 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" Jan 04 12:08:34 crc kubenswrapper[5003]: I0104 12:08:34.702997 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-rs66m"] Jan 04 12:08:34 crc kubenswrapper[5003]: I0104 12:08:34.703616 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" podUID="967309ce-cd9e-4966-87fd-da0912bf701a" containerName="dnsmasq-dns" containerID="cri-o://be0658409d3952a3d07b499b69dcd21fb8f8c24b2432df277ce2cdc4c6694340" gracePeriod=10 Jan 04 12:08:34 crc kubenswrapper[5003]: I0104 12:08:34.747935 5003 generic.go:334] "Generic (PLEG): container finished" podID="c6917c5b-df80-431c-af8f-e126bfac96b2" containerID="e81fb75cb09615116c6e41b45884e31ed54d53e90ad9d7239b96cc08c9c39dc0" exitCode=0 Jan 04 12:08:34 crc kubenswrapper[5003]: I0104 12:08:34.748771 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fdswd-config-8d7cr" event={"ID":"c6917c5b-df80-431c-af8f-e126bfac96b2","Type":"ContainerDied","Data":"e81fb75cb09615116c6e41b45884e31ed54d53e90ad9d7239b96cc08c9c39dc0"} Jan 04 12:08:34 crc kubenswrapper[5003]: I0104 12:08:34.748798 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fdswd-config-8d7cr" 
event={"ID":"c6917c5b-df80-431c-af8f-e126bfac96b2","Type":"ContainerStarted","Data":"fadb0c837bd9f4ee41b5a2af2a5bbb8ffe119af1eb813a4f2eb6dbb267b46e40"} Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.166971 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" podUID="967309ce-cd9e-4966-87fd-da0912bf701a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.116:5353: connect: connection refused" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.205773 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-788e-account-create-update-bg6xj" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.208233 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-xfd9f" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.272739 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-operator-scripts\") pod \"50703d14-b53d-4ef3-8b2a-790b55e0c5d1\" (UID: \"50703d14-b53d-4ef3-8b2a-790b55e0c5d1\") " Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.272852 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glbs5\" (UniqueName: \"kubernetes.io/projected/3121d3df-082a-434a-8bad-4dce3e7d9b09-kube-api-access-glbs5\") pod \"3121d3df-082a-434a-8bad-4dce3e7d9b09\" (UID: \"3121d3df-082a-434a-8bad-4dce3e7d9b09\") " Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.272983 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3121d3df-082a-434a-8bad-4dce3e7d9b09-operator-scripts\") pod \"3121d3df-082a-434a-8bad-4dce3e7d9b09\" (UID: \"3121d3df-082a-434a-8bad-4dce3e7d9b09\") " Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.273116 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fcft\" (UniqueName: \"kubernetes.io/projected/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-kube-api-access-6fcft\") pod \"50703d14-b53d-4ef3-8b2a-790b55e0c5d1\" (UID: \"50703d14-b53d-4ef3-8b2a-790b55e0c5d1\") " Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.273938 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "50703d14-b53d-4ef3-8b2a-790b55e0c5d1" (UID: "50703d14-b53d-4ef3-8b2a-790b55e0c5d1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.274256 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3121d3df-082a-434a-8bad-4dce3e7d9b09-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3121d3df-082a-434a-8bad-4dce3e7d9b09" (UID: "3121d3df-082a-434a-8bad-4dce3e7d9b09"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.283263 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-kube-api-access-6fcft" (OuterVolumeSpecName: "kube-api-access-6fcft") pod "50703d14-b53d-4ef3-8b2a-790b55e0c5d1" (UID: "50703d14-b53d-4ef3-8b2a-790b55e0c5d1"). InnerVolumeSpecName "kube-api-access-6fcft". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.283415 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3121d3df-082a-434a-8bad-4dce3e7d9b09-kube-api-access-glbs5" (OuterVolumeSpecName: "kube-api-access-glbs5") pod "3121d3df-082a-434a-8bad-4dce3e7d9b09" (UID: "3121d3df-082a-434a-8bad-4dce3e7d9b09"). InnerVolumeSpecName "kube-api-access-glbs5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.375051 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glbs5\" (UniqueName: \"kubernetes.io/projected/3121d3df-082a-434a-8bad-4dce3e7d9b09-kube-api-access-glbs5\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.375085 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3121d3df-082a-434a-8bad-4dce3e7d9b09-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.375097 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fcft\" (UniqueName: \"kubernetes.io/projected/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-kube-api-access-6fcft\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.375111 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/50703d14-b53d-4ef3-8b2a-790b55e0c5d1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.765689 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-xfd9f" event={"ID":"50703d14-b53d-4ef3-8b2a-790b55e0c5d1","Type":"ContainerDied","Data":"a604d9b8548684a88175e71265eb441c7e554b032001978e93d0715838614892"} Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.765742 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a604d9b8548684a88175e71265eb441c7e554b032001978e93d0715838614892" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.765815 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-xfd9f" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.771711 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-788e-account-create-update-bg6xj" event={"ID":"3121d3df-082a-434a-8bad-4dce3e7d9b09","Type":"ContainerDied","Data":"33d3d9e5189d1d75685ba7fb131b465cba1a0f92b05c6d2537945bf8d847956d"} Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.771765 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="33d3d9e5189d1d75685ba7fb131b465cba1a0f92b05c6d2537945bf8d847956d" Jan 04 12:08:35 crc kubenswrapper[5003]: I0104 12:08:35.771768 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-788e-account-create-update-bg6xj" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.148040 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.191372 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-log-ovn\") pod \"c6917c5b-df80-431c-af8f-e126bfac96b2\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.191456 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-additional-scripts\") pod \"c6917c5b-df80-431c-af8f-e126bfac96b2\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.191521 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "c6917c5b-df80-431c-af8f-e126bfac96b2" (UID: "c6917c5b-df80-431c-af8f-e126bfac96b2"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.191556 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-scripts\") pod \"c6917c5b-df80-431c-af8f-e126bfac96b2\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.191591 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zswt4\" (UniqueName: \"kubernetes.io/projected/c6917c5b-df80-431c-af8f-e126bfac96b2-kube-api-access-zswt4\") pod \"c6917c5b-df80-431c-af8f-e126bfac96b2\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.191631 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run-ovn\") pod \"c6917c5b-df80-431c-af8f-e126bfac96b2\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.191774 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run\") pod \"c6917c5b-df80-431c-af8f-e126bfac96b2\" (UID: \"c6917c5b-df80-431c-af8f-e126bfac96b2\") " Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.192296 5003 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.192338 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run" (OuterVolumeSpecName: "var-run") pod "c6917c5b-df80-431c-af8f-e126bfac96b2" (UID: "c6917c5b-df80-431c-af8f-e126bfac96b2"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.192378 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "c6917c5b-df80-431c-af8f-e126bfac96b2" (UID: "c6917c5b-df80-431c-af8f-e126bfac96b2"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.192507 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "c6917c5b-df80-431c-af8f-e126bfac96b2" (UID: "c6917c5b-df80-431c-af8f-e126bfac96b2"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.192780 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-scripts" (OuterVolumeSpecName: "scripts") pod "c6917c5b-df80-431c-af8f-e126bfac96b2" (UID: "c6917c5b-df80-431c-af8f-e126bfac96b2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.195755 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6917c5b-df80-431c-af8f-e126bfac96b2-kube-api-access-zswt4" (OuterVolumeSpecName: "kube-api-access-zswt4") pod "c6917c5b-df80-431c-af8f-e126bfac96b2" (UID: "c6917c5b-df80-431c-af8f-e126bfac96b2"). InnerVolumeSpecName "kube-api-access-zswt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.293973 5003 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.294029 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6917c5b-df80-431c-af8f-e126bfac96b2-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.294039 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zswt4\" (UniqueName: \"kubernetes.io/projected/c6917c5b-df80-431c-af8f-e126bfac96b2-kube-api-access-zswt4\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.294051 5003 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.294059 5003 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c6917c5b-df80-431c-af8f-e126bfac96b2-var-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.782522 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-fdswd-config-8d7cr" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.782512 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fdswd-config-8d7cr" event={"ID":"c6917c5b-df80-431c-af8f-e126bfac96b2","Type":"ContainerDied","Data":"fadb0c837bd9f4ee41b5a2af2a5bbb8ffe119af1eb813a4f2eb6dbb267b46e40"} Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.782678 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fadb0c837bd9f4ee41b5a2af2a5bbb8ffe119af1eb813a4f2eb6dbb267b46e40" Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.784533 5003 generic.go:334] "Generic (PLEG): container finished" podID="967309ce-cd9e-4966-87fd-da0912bf701a" containerID="be0658409d3952a3d07b499b69dcd21fb8f8c24b2432df277ce2cdc4c6694340" exitCode=0 Jan 04 12:08:36 crc kubenswrapper[5003]: I0104 12:08:36.784571 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" event={"ID":"967309ce-cd9e-4966-87fd-da0912bf701a","Type":"ContainerDied","Data":"be0658409d3952a3d07b499b69dcd21fb8f8c24b2432df277ce2cdc4c6694340"} Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.240461 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-fdswd-config-8d7cr"] Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.245915 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-fdswd-config-8d7cr"] Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.258394 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.316174 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vldrb\" (UniqueName: \"kubernetes.io/projected/967309ce-cd9e-4966-87fd-da0912bf701a-kube-api-access-vldrb\") pod \"967309ce-cd9e-4966-87fd-da0912bf701a\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.316249 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-sb\") pod \"967309ce-cd9e-4966-87fd-da0912bf701a\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.316279 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-nb\") pod \"967309ce-cd9e-4966-87fd-da0912bf701a\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.316305 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-dns-svc\") pod \"967309ce-cd9e-4966-87fd-da0912bf701a\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.316428 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-config\") pod \"967309ce-cd9e-4966-87fd-da0912bf701a\" (UID: \"967309ce-cd9e-4966-87fd-da0912bf701a\") " Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.376778 5003 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/967309ce-cd9e-4966-87fd-da0912bf701a-kube-api-access-vldrb" (OuterVolumeSpecName: "kube-api-access-vldrb") pod "967309ce-cd9e-4966-87fd-da0912bf701a" (UID: "967309ce-cd9e-4966-87fd-da0912bf701a"). InnerVolumeSpecName "kube-api-access-vldrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.378695 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "967309ce-cd9e-4966-87fd-da0912bf701a" (UID: "967309ce-cd9e-4966-87fd-da0912bf701a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.378868 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "967309ce-cd9e-4966-87fd-da0912bf701a" (UID: "967309ce-cd9e-4966-87fd-da0912bf701a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.382890 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-config" (OuterVolumeSpecName: "config") pod "967309ce-cd9e-4966-87fd-da0912bf701a" (UID: "967309ce-cd9e-4966-87fd-da0912bf701a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.399190 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "967309ce-cd9e-4966-87fd-da0912bf701a" (UID: "967309ce-cd9e-4966-87fd-da0912bf701a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.417858 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.418327 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vldrb\" (UniqueName: \"kubernetes.io/projected/967309ce-cd9e-4966-87fd-da0912bf701a-kube-api-access-vldrb\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.418343 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.418356 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.418365 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/967309ce-cd9e-4966-87fd-da0912bf701a-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.482074 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-fdswd" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.795948 5003 generic.go:334] "Generic (PLEG): container finished" podID="829003dc-aa5e-43a6-a4f5-c578c73e76d4" containerID="6364cbec859dec141ca449b3d978906aef35d877a78403265122c821233736ff" exitCode=0 Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.796152 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"829003dc-aa5e-43a6-a4f5-c578c73e76d4","Type":"ContainerDied","Data":"6364cbec859dec141ca449b3d978906aef35d877a78403265122c821233736ff"} Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.800438 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" event={"ID":"967309ce-cd9e-4966-87fd-da0912bf701a","Type":"ContainerDied","Data":"f10b995e43eec18faba6a9bc483cf5b19d2ebd3ca43784d911b6fc6f905d2947"} Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.800499 5003 scope.go:117] "RemoveContainer" containerID="be0658409d3952a3d07b499b69dcd21fb8f8c24b2432df277ce2cdc4c6694340" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.800642 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586b989cdc-rs66m" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.837982 5003 scope.go:117] "RemoveContainer" containerID="2b20df2efc15be43b6d5466b005754a98c8743980da63fdc96b9de64d7424a8d" Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.869364 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-rs66m"] Jan 04 12:08:37 crc kubenswrapper[5003]: I0104 12:08:37.878459 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-586b989cdc-rs66m"] Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.081917 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-jvhdz"] Jan 04 12:08:38 crc kubenswrapper[5003]: E0104 12:08:38.082338 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da444afc-236b-465d-a105-f6c3b25f677c" containerName="mariadb-account-create-update" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082398 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="da444afc-236b-465d-a105-f6c3b25f677c" containerName="mariadb-account-create-update" Jan 04 12:08:38 crc kubenswrapper[5003]: E0104 12:08:38.082416 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5838830-ab2d-4d0d-ab22-7448352db030" containerName="mariadb-database-create" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082426 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5838830-ab2d-4d0d-ab22-7448352db030" containerName="mariadb-database-create" Jan 04 12:08:38 crc kubenswrapper[5003]: E0104 12:08:38.082445 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="967309ce-cd9e-4966-87fd-da0912bf701a" containerName="init" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082455 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="967309ce-cd9e-4966-87fd-da0912bf701a" containerName="init" Jan 04 12:08:38 crc kubenswrapper[5003]: E0104 12:08:38.082467 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="967309ce-cd9e-4966-87fd-da0912bf701a" containerName="dnsmasq-dns" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082476 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="967309ce-cd9e-4966-87fd-da0912bf701a" containerName="dnsmasq-dns" Jan 04 12:08:38 crc kubenswrapper[5003]: E0104 12:08:38.082493 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3121d3df-082a-434a-8bad-4dce3e7d9b09" containerName="mariadb-account-create-update" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082501 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3121d3df-082a-434a-8bad-4dce3e7d9b09" containerName="mariadb-account-create-update" Jan 04 12:08:38 crc kubenswrapper[5003]: E0104 12:08:38.082521 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6917c5b-df80-431c-af8f-e126bfac96b2" containerName="ovn-config" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082529 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6917c5b-df80-431c-af8f-e126bfac96b2" containerName="ovn-config" Jan 04 12:08:38 crc kubenswrapper[5003]: E0104 12:08:38.082548 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50703d14-b53d-4ef3-8b2a-790b55e0c5d1" containerName="mariadb-database-create" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082557 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="50703d14-b53d-4ef3-8b2a-790b55e0c5d1" 
containerName="mariadb-database-create" Jan 04 12:08:38 crc kubenswrapper[5003]: E0104 12:08:38.082582 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ad51468-82d2-4b49-b778-f2d296eafbf1" containerName="mariadb-account-create-update" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082591 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ad51468-82d2-4b49-b778-f2d296eafbf1" containerName="mariadb-account-create-update" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082778 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="50703d14-b53d-4ef3-8b2a-790b55e0c5d1" containerName="mariadb-database-create" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082803 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6917c5b-df80-431c-af8f-e126bfac96b2" containerName="ovn-config" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082827 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3121d3df-082a-434a-8bad-4dce3e7d9b09" containerName="mariadb-account-create-update" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082838 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5838830-ab2d-4d0d-ab22-7448352db030" containerName="mariadb-database-create" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082863 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="967309ce-cd9e-4966-87fd-da0912bf701a" containerName="dnsmasq-dns" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082885 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="da444afc-236b-465d-a105-f6c3b25f677c" containerName="mariadb-account-create-update" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.082894 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ad51468-82d2-4b49-b778-f2d296eafbf1" containerName="mariadb-account-create-update" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.083625 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.086737 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.087202 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-xn574" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.097198 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-jvhdz"] Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.233693 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdgc9\" (UniqueName: \"kubernetes.io/projected/95845d2e-e8cf-4d56-ad63-260115e0efc8-kube-api-access-zdgc9\") pod \"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.233817 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-db-sync-config-data\") pod \"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.233891 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-combined-ca-bundle\") pod \"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.233918 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-config-data\") pod \"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.336571 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-combined-ca-bundle\") pod \"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.336646 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-config-data\") pod \"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.336811 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdgc9\" (UniqueName: \"kubernetes.io/projected/95845d2e-e8cf-4d56-ad63-260115e0efc8-kube-api-access-zdgc9\") pod \"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.336927 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-db-sync-config-data\") pod 
\"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.341193 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-config-data\") pod \"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.356225 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdgc9\" (UniqueName: \"kubernetes.io/projected/95845d2e-e8cf-4d56-ad63-260115e0efc8-kube-api-access-zdgc9\") pod \"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.356956 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-combined-ca-bundle\") pod \"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.359480 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-db-sync-config-data\") pod \"glance-db-sync-jvhdz\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") " pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.402469 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jvhdz" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.838385 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="967309ce-cd9e-4966-87fd-da0912bf701a" path="/var/lib/kubelet/pods/967309ce-cd9e-4966-87fd-da0912bf701a/volumes" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.839899 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6917c5b-df80-431c-af8f-e126bfac96b2" path="/var/lib/kubelet/pods/c6917c5b-df80-431c-af8f-e126bfac96b2/volumes" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.840806 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"829003dc-aa5e-43a6-a4f5-c578c73e76d4","Type":"ContainerStarted","Data":"a7db53d84b6d5b63248f6eb1e83906ab06a6912bc5b207be4b9a8cd84f1c3d9f"} Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.841100 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.879426 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.3380221 podStartE2EDuration="1m30.87939024s" podCreationTimestamp="2026-01-04 12:07:08 +0000 UTC" firstStartedPulling="2026-01-04 12:07:10.744209473 +0000 UTC m=+1146.217239314" lastFinishedPulling="2026-01-04 12:08:04.285577613 +0000 UTC m=+1199.758607454" observedRunningTime="2026-01-04 12:08:38.877624734 +0000 UTC m=+1234.350654615" watchObservedRunningTime="2026-01-04 12:08:38.87939024 +0000 UTC m=+1234.352420081" Jan 04 12:08:38 crc kubenswrapper[5003]: I0104 12:08:38.996806 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-jvhdz"] Jan 04 12:08:39 crc 
kubenswrapper[5003]: W0104 12:08:39.014258 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95845d2e_e8cf_4d56_ad63_260115e0efc8.slice/crio-88e5e1fc04c450fd28bd8e291178470ed9e9e4acff0a29a2cc369251a9fc687e WatchSource:0}: Error finding container 88e5e1fc04c450fd28bd8e291178470ed9e9e4acff0a29a2cc369251a9fc687e: Status 404 returned error can't find the container with id 88e5e1fc04c450fd28bd8e291178470ed9e9e4acff0a29a2cc369251a9fc687e Jan 04 12:08:39 crc kubenswrapper[5003]: I0104 12:08:39.825134 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jvhdz" event={"ID":"95845d2e-e8cf-4d56-ad63-260115e0efc8","Type":"ContainerStarted","Data":"88e5e1fc04c450fd28bd8e291178470ed9e9e4acff0a29a2cc369251a9fc687e"} Jan 04 12:08:40 crc kubenswrapper[5003]: I0104 12:08:40.832981 5003 generic.go:334] "Generic (PLEG): container finished" podID="37399d86-126f-4327-94c9-f41df343ab62" containerID="64406d146472e06022b356b2502a9a5cf00a7d3f689122719d04f157d1ad2893" exitCode=0 Jan 04 12:08:40 crc kubenswrapper[5003]: I0104 12:08:40.833092 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-g4p7b" event={"ID":"37399d86-126f-4327-94c9-f41df343ab62","Type":"ContainerDied","Data":"64406d146472e06022b356b2502a9a5cf00a7d3f689122719d04f157d1ad2893"} Jan 04 12:08:41 crc kubenswrapper[5003]: I0104 12:08:41.497386 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0" Jan 04 12:08:41 crc kubenswrapper[5003]: I0104 12:08:41.504787 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift\") pod \"swift-storage-0\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " pod="openstack/swift-storage-0" Jan 04 12:08:41 crc kubenswrapper[5003]: I0104 12:08:41.744609 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 04 12:08:41 crc kubenswrapper[5003]: I0104 12:08:41.847201 5003 generic.go:334] "Generic (PLEG): container finished" podID="81193935-fcd0-4877-9d65-6155c1a888e2" containerID="13206dc80be6f8795f671b42dac3396c5e445e376c8796f74fffdbfb54487a41" exitCode=0 Jan 04 12:08:41 crc kubenswrapper[5003]: I0104 12:08:41.847328 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"81193935-fcd0-4877-9d65-6155c1a888e2","Type":"ContainerDied","Data":"13206dc80be6f8795f671b42dac3396c5e445e376c8796f74fffdbfb54487a41"} Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.236714 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.411650 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-dispersionconf\") pod \"37399d86-126f-4327-94c9-f41df343ab62\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.412053 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-ring-data-devices\") pod \"37399d86-126f-4327-94c9-f41df343ab62\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.412083 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcmr2\" (UniqueName: \"kubernetes.io/projected/37399d86-126f-4327-94c9-f41df343ab62-kube-api-access-vcmr2\") pod \"37399d86-126f-4327-94c9-f41df343ab62\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.412108 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-swiftconf\") pod \"37399d86-126f-4327-94c9-f41df343ab62\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.412195 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-combined-ca-bundle\") pod \"37399d86-126f-4327-94c9-f41df343ab62\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.412270 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/37399d86-126f-4327-94c9-f41df343ab62-etc-swift\") pod \"37399d86-126f-4327-94c9-f41df343ab62\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.412321 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-scripts\") pod \"37399d86-126f-4327-94c9-f41df343ab62\" (UID: \"37399d86-126f-4327-94c9-f41df343ab62\") " Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.412906 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "37399d86-126f-4327-94c9-f41df343ab62" (UID: "37399d86-126f-4327-94c9-f41df343ab62"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.413337 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37399d86-126f-4327-94c9-f41df343ab62-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "37399d86-126f-4327-94c9-f41df343ab62" (UID: "37399d86-126f-4327-94c9-f41df343ab62"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.413453 5003 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.425273 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37399d86-126f-4327-94c9-f41df343ab62-kube-api-access-vcmr2" (OuterVolumeSpecName: "kube-api-access-vcmr2") pod "37399d86-126f-4327-94c9-f41df343ab62" (UID: "37399d86-126f-4327-94c9-f41df343ab62"). InnerVolumeSpecName "kube-api-access-vcmr2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.433510 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.433688 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "37399d86-126f-4327-94c9-f41df343ab62" (UID: "37399d86-126f-4327-94c9-f41df343ab62"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.435891 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-scripts" (OuterVolumeSpecName: "scripts") pod "37399d86-126f-4327-94c9-f41df343ab62" (UID: "37399d86-126f-4327-94c9-f41df343ab62"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:08:42 crc kubenswrapper[5003]: W0104 12:08:42.437302 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode387635d_9ef2_4b1d_9303_0d762e8b282c.slice/crio-b6d95a1fc2bafb5486a724dc2fd9f03f5f51c484f76ffa2e8427f9be9b2205d8 WatchSource:0}: Error finding container b6d95a1fc2bafb5486a724dc2fd9f03f5f51c484f76ffa2e8427f9be9b2205d8: Status 404 returned error can't find the container with id b6d95a1fc2bafb5486a724dc2fd9f03f5f51c484f76ffa2e8427f9be9b2205d8 Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.444221 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "37399d86-126f-4327-94c9-f41df343ab62" (UID: "37399d86-126f-4327-94c9-f41df343ab62"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.449560 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37399d86-126f-4327-94c9-f41df343ab62" (UID: "37399d86-126f-4327-94c9-f41df343ab62"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.514590 5003 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.514624 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.514634 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/37399d86-126f-4327-94c9-f41df343ab62-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.514644 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37399d86-126f-4327-94c9-f41df343ab62-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.514652 5003 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/37399d86-126f-4327-94c9-f41df343ab62-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.514661 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcmr2\" (UniqueName: \"kubernetes.io/projected/37399d86-126f-4327-94c9-f41df343ab62-kube-api-access-vcmr2\") on node \"crc\" DevicePath \"\"" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.867251 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-g4p7b" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.867232 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-g4p7b" event={"ID":"37399d86-126f-4327-94c9-f41df343ab62","Type":"ContainerDied","Data":"30acd2dffc7df832200ca0701532929d9e009cc2aad317da3b0a3e6905329066"} Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.867546 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30acd2dffc7df832200ca0701532929d9e009cc2aad317da3b0a3e6905329066" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.868884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"b6d95a1fc2bafb5486a724dc2fd9f03f5f51c484f76ffa2e8427f9be9b2205d8"} Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.870784 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"81193935-fcd0-4877-9d65-6155c1a888e2","Type":"ContainerStarted","Data":"f0dca325e90af7570f19f9ac0610466deeda038b06d2c3ca9f19a6c46586b480"} Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.871135 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 04 12:08:42 crc kubenswrapper[5003]: I0104 12:08:42.904133 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371940.95066 podStartE2EDuration="1m35.904115738s" podCreationTimestamp="2026-01-04 12:07:07 +0000 UTC" firstStartedPulling="2026-01-04 12:07:10.366871467 +0000 UTC m=+1145.839901308" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2026-01-04 12:08:42.89691784 +0000 UTC m=+1238.369947681" watchObservedRunningTime="2026-01-04 12:08:42.904115738 +0000 UTC m=+1238.377145589" Jan 04 12:08:43 crc kubenswrapper[5003]: E0104 12:08:43.028348 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37399d86_126f_4327_94c9_f41df343ab62.slice/crio-30acd2dffc7df832200ca0701532929d9e009cc2aad317da3b0a3e6905329066\": RecentStats: unable to find data in memory cache]" Jan 04 12:08:44 crc kubenswrapper[5003]: I0104 12:08:44.885978 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"8a065f77698f6ac06fadadaa5b0b12a9e635a05f7a2fd3ab7f7457eb16357d7d"} Jan 04 12:08:49 crc kubenswrapper[5003]: I0104 12:08:49.724282 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:08:56 crc kubenswrapper[5003]: E0104 12:08:56.352518 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f" Jan 04 12:08:56 crc kubenswrapper[5003]: E0104 12:08:56.353389 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zdgc9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:
nil,} start failed in pod glance-db-sync-jvhdz_openstack(95845d2e-e8cf-4d56-ad63-260115e0efc8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:08:56 crc kubenswrapper[5003]: E0104 12:08:56.354613 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-jvhdz" podUID="95845d2e-e8cf-4d56-ad63-260115e0efc8" Jan 04 12:08:57 crc kubenswrapper[5003]: I0104 12:08:57.004220 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"707399f94576a05d1029f0ca7a930546bdc46ba6b8a66a7f7d5123ee7b10547b"} Jan 04 12:08:57 crc kubenswrapper[5003]: I0104 12:08:57.004708 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"8d2a2c4e9a22b4fe3fd7b40f0290eeae57fd3d5fa8a0b12d022f40ed1d9de1ab"} Jan 04 12:08:57 crc kubenswrapper[5003]: I0104 12:08:57.004725 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"9859f2a44803d2a1d464de1de41f2bfd77e5f0896ae37ca5e574d6ba7d0b8491"} Jan 04 12:08:57 crc kubenswrapper[5003]: E0104 12:08:57.005853 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api@sha256:e4aa4ebbb1e581a12040e9ad2ae2709ac31b5d965bb64fc4252d1028b05c565f\\\"\"" pod="openstack/glance-db-sync-jvhdz" podUID="95845d2e-e8cf-4d56-ad63-260115e0efc8" Jan 04 12:08:59 crc kubenswrapper[5003]: I0104 12:08:59.032069 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"95da8a5d432f4fe64060441f956785bb4f966684b7334a245346aec06e1cf140"} Jan 04 12:08:59 crc kubenswrapper[5003]: I0104 12:08:59.032972 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"0f48ea6f8d2e18984ca5443a03a04938c8b360cd16c1c6815b02f2fb373f0a8e"} Jan 04 12:08:59 crc kubenswrapper[5003]: I0104 12:08:59.032984 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"f0aaa591de01ea442a981d1ec695614335ad33f8028cf7d9ef5da12021491ff0"} Jan 04 12:08:59 crc kubenswrapper[5003]: I0104 12:08:59.032996 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"dc00a97e29c22bf9a2a36c4af6d0c30fdef5266c6b5c76c89cee2d2f47cd401e"} Jan 04 12:08:59 crc kubenswrapper[5003]: I0104 12:08:59.576283 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 04 12:08:59 crc kubenswrapper[5003]: I0104 12:08:59.927881 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-89hnr"] Jan 04 12:08:59 crc kubenswrapper[5003]: E0104 12:08:59.929534 5003 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="37399d86-126f-4327-94c9-f41df343ab62" containerName="swift-ring-rebalance" Jan 04 12:08:59 crc kubenswrapper[5003]: I0104 12:08:59.929563 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="37399d86-126f-4327-94c9-f41df343ab62" containerName="swift-ring-rebalance" Jan 04 12:08:59 crc kubenswrapper[5003]: I0104 12:08:59.929977 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="37399d86-126f-4327-94c9-f41df343ab62" containerName="swift-ring-rebalance" Jan 04 12:08:59 crc kubenswrapper[5003]: I0104 12:08:59.942373 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-89hnr"] Jan 04 12:08:59 crc kubenswrapper[5003]: I0104 12:08:59.942501 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-89hnr" Jan 04 12:08:59 crc kubenswrapper[5003]: I0104 12:08:59.998801 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-6hmnw"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.003415 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6hmnw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.022195 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6hmnw"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.023547 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03157841-c4cb-4d87-b168-ccbe7b52526d-operator-scripts\") pod \"cinder-db-create-89hnr\" (UID: \"03157841-c4cb-4d87-b168-ccbe7b52526d\") " pod="openstack/cinder-db-create-89hnr" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.023609 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8z5v\" (UniqueName: \"kubernetes.io/projected/96d2524c-4772-4eb6-b108-0513fce70ad8-kube-api-access-f8z5v\") pod \"barbican-db-create-6hmnw\" (UID: \"96d2524c-4772-4eb6-b108-0513fce70ad8\") " pod="openstack/barbican-db-create-6hmnw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.023654 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzjcn\" (UniqueName: \"kubernetes.io/projected/03157841-c4cb-4d87-b168-ccbe7b52526d-kube-api-access-qzjcn\") pod \"cinder-db-create-89hnr\" (UID: \"03157841-c4cb-4d87-b168-ccbe7b52526d\") " pod="openstack/cinder-db-create-89hnr" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.023693 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96d2524c-4772-4eb6-b108-0513fce70ad8-operator-scripts\") pod \"barbican-db-create-6hmnw\" (UID: \"96d2524c-4772-4eb6-b108-0513fce70ad8\") " pod="openstack/barbican-db-create-6hmnw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.039750 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-9b3b-account-create-update-vh6fj"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.041270 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-9b3b-account-create-update-vh6fj" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.046093 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.061179 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-9b3b-account-create-update-vh6fj"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.113173 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-553f-account-create-update-mhxbb"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.114197 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-553f-account-create-update-mhxbb" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.122998 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.124769 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzjcn\" (UniqueName: \"kubernetes.io/projected/03157841-c4cb-4d87-b168-ccbe7b52526d-kube-api-access-qzjcn\") pod \"cinder-db-create-89hnr\" (UID: \"03157841-c4cb-4d87-b168-ccbe7b52526d\") " pod="openstack/cinder-db-create-89hnr" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.124831 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96d2524c-4772-4eb6-b108-0513fce70ad8-operator-scripts\") pod \"barbican-db-create-6hmnw\" (UID: \"96d2524c-4772-4eb6-b108-0513fce70ad8\") " pod="openstack/barbican-db-create-6hmnw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.124876 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-operator-scripts\") pod \"cinder-9b3b-account-create-update-vh6fj\" (UID: \"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756\") " pod="openstack/cinder-9b3b-account-create-update-vh6fj" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.124913 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8b5x\" (UniqueName: \"kubernetes.io/projected/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-kube-api-access-r8b5x\") pod \"cinder-9b3b-account-create-update-vh6fj\" (UID: \"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756\") " pod="openstack/cinder-9b3b-account-create-update-vh6fj" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.124950 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03157841-c4cb-4d87-b168-ccbe7b52526d-operator-scripts\") pod \"cinder-db-create-89hnr\" (UID: \"03157841-c4cb-4d87-b168-ccbe7b52526d\") " pod="openstack/cinder-db-create-89hnr" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.124985 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8z5v\" (UniqueName: \"kubernetes.io/projected/96d2524c-4772-4eb6-b108-0513fce70ad8-kube-api-access-f8z5v\") pod \"barbican-db-create-6hmnw\" (UID: \"96d2524c-4772-4eb6-b108-0513fce70ad8\") " pod="openstack/barbican-db-create-6hmnw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.126104 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/96d2524c-4772-4eb6-b108-0513fce70ad8-operator-scripts\") pod \"barbican-db-create-6hmnw\" (UID: \"96d2524c-4772-4eb6-b108-0513fce70ad8\") " pod="openstack/barbican-db-create-6hmnw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.126005 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03157841-c4cb-4d87-b168-ccbe7b52526d-operator-scripts\") pod \"cinder-db-create-89hnr\" (UID: \"03157841-c4cb-4d87-b168-ccbe7b52526d\") " pod="openstack/cinder-db-create-89hnr" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.130222 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-553f-account-create-update-mhxbb"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.172625 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8z5v\" (UniqueName: \"kubernetes.io/projected/96d2524c-4772-4eb6-b108-0513fce70ad8-kube-api-access-f8z5v\") pod \"barbican-db-create-6hmnw\" (UID: \"96d2524c-4772-4eb6-b108-0513fce70ad8\") " pod="openstack/barbican-db-create-6hmnw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.177145 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzjcn\" (UniqueName: \"kubernetes.io/projected/03157841-c4cb-4d87-b168-ccbe7b52526d-kube-api-access-qzjcn\") pod \"cinder-db-create-89hnr\" (UID: \"03157841-c4cb-4d87-b168-ccbe7b52526d\") " pod="openstack/cinder-db-create-89hnr" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.226955 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/471449a2-d6cc-477b-bad9-a616a54f4502-operator-scripts\") pod \"barbican-553f-account-create-update-mhxbb\" (UID: \"471449a2-d6cc-477b-bad9-a616a54f4502\") " pod="openstack/barbican-553f-account-create-update-mhxbb" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.227089 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-operator-scripts\") pod \"cinder-9b3b-account-create-update-vh6fj\" (UID: \"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756\") " pod="openstack/cinder-9b3b-account-create-update-vh6fj" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.227131 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8b5x\" (UniqueName: \"kubernetes.io/projected/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-kube-api-access-r8b5x\") pod \"cinder-9b3b-account-create-update-vh6fj\" (UID: \"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756\") " pod="openstack/cinder-9b3b-account-create-update-vh6fj" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.227292 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc7w6\" (UniqueName: \"kubernetes.io/projected/471449a2-d6cc-477b-bad9-a616a54f4502-kube-api-access-vc7w6\") pod \"barbican-553f-account-create-update-mhxbb\" (UID: \"471449a2-d6cc-477b-bad9-a616a54f4502\") " pod="openstack/barbican-553f-account-create-update-mhxbb" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.227901 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-operator-scripts\") pod \"cinder-9b3b-account-create-update-vh6fj\" 
(UID: \"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756\") " pod="openstack/cinder-9b3b-account-create-update-vh6fj" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.236105 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-lq5s5"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.238879 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-lq5s5" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.240940 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.240979 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.241057 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-nsd8s" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.241488 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.255755 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8b5x\" (UniqueName: \"kubernetes.io/projected/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-kube-api-access-r8b5x\") pod \"cinder-9b3b-account-create-update-vh6fj\" (UID: \"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756\") " pod="openstack/cinder-9b3b-account-create-update-vh6fj" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.260952 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-lq5s5"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.266628 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-89hnr" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.323648 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-fqzhw"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.324743 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-fqzhw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.329205 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-6hmnw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.331034 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/471449a2-d6cc-477b-bad9-a616a54f4502-operator-scripts\") pod \"barbican-553f-account-create-update-mhxbb\" (UID: \"471449a2-d6cc-477b-bad9-a616a54f4502\") " pod="openstack/barbican-553f-account-create-update-mhxbb" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.331104 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74drg\" (UniqueName: \"kubernetes.io/projected/226781be-ff54-487b-9180-abaf7d0eda00-kube-api-access-74drg\") pod \"keystone-db-sync-lq5s5\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") " pod="openstack/keystone-db-sync-lq5s5" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.331163 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-config-data\") pod \"keystone-db-sync-lq5s5\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") " pod="openstack/keystone-db-sync-lq5s5" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.331294 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc7w6\" (UniqueName: \"kubernetes.io/projected/471449a2-d6cc-477b-bad9-a616a54f4502-kube-api-access-vc7w6\") pod \"barbican-553f-account-create-update-mhxbb\" (UID: \"471449a2-d6cc-477b-bad9-a616a54f4502\") " pod="openstack/barbican-553f-account-create-update-mhxbb" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.331335 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-combined-ca-bundle\") pod \"keystone-db-sync-lq5s5\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") " pod="openstack/keystone-db-sync-lq5s5" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.332311 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/471449a2-d6cc-477b-bad9-a616a54f4502-operator-scripts\") pod \"barbican-553f-account-create-update-mhxbb\" (UID: \"471449a2-d6cc-477b-bad9-a616a54f4502\") " pod="openstack/barbican-553f-account-create-update-mhxbb" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.353867 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-fqzhw"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.388519 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc7w6\" (UniqueName: \"kubernetes.io/projected/471449a2-d6cc-477b-bad9-a616a54f4502-kube-api-access-vc7w6\") pod \"barbican-553f-account-create-update-mhxbb\" (UID: \"471449a2-d6cc-477b-bad9-a616a54f4502\") " pod="openstack/barbican-553f-account-create-update-mhxbb" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.424814 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-9b3b-account-create-update-vh6fj" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.433253 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74drg\" (UniqueName: \"kubernetes.io/projected/226781be-ff54-487b-9180-abaf7d0eda00-kube-api-access-74drg\") pod \"keystone-db-sync-lq5s5\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") " pod="openstack/keystone-db-sync-lq5s5" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.433334 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-operator-scripts\") pod \"neutron-db-create-fqzhw\" (UID: \"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a\") " pod="openstack/neutron-db-create-fqzhw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.433365 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh4hk\" (UniqueName: \"kubernetes.io/projected/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-kube-api-access-hh4hk\") pod \"neutron-db-create-fqzhw\" (UID: \"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a\") " pod="openstack/neutron-db-create-fqzhw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.433390 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-config-data\") pod \"keystone-db-sync-lq5s5\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") " pod="openstack/keystone-db-sync-lq5s5" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.433419 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-combined-ca-bundle\") pod \"keystone-db-sync-lq5s5\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") " pod="openstack/keystone-db-sync-lq5s5" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.443782 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-combined-ca-bundle\") pod \"keystone-db-sync-lq5s5\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") " pod="openstack/keystone-db-sync-lq5s5" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.450040 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-config-data\") pod \"keystone-db-sync-lq5s5\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") " pod="openstack/keystone-db-sync-lq5s5" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.452204 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-553f-account-create-update-mhxbb" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.463979 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-daa1-account-create-update-gftf9"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.464762 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74drg\" (UniqueName: \"kubernetes.io/projected/226781be-ff54-487b-9180-abaf7d0eda00-kube-api-access-74drg\") pod \"keystone-db-sync-lq5s5\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") " pod="openstack/keystone-db-sync-lq5s5" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.465502 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-daa1-account-create-update-gftf9" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.468738 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.479334 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-daa1-account-create-update-gftf9"] Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.535372 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-operator-scripts\") pod \"neutron-daa1-account-create-update-gftf9\" (UID: \"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9\") " pod="openstack/neutron-daa1-account-create-update-gftf9" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.535427 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-operator-scripts\") pod \"neutron-db-create-fqzhw\" (UID: \"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a\") " pod="openstack/neutron-db-create-fqzhw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.535461 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh4hk\" (UniqueName: \"kubernetes.io/projected/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-kube-api-access-hh4hk\") pod \"neutron-db-create-fqzhw\" (UID: \"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a\") " pod="openstack/neutron-db-create-fqzhw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.535562 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmj7x\" (UniqueName: \"kubernetes.io/projected/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-kube-api-access-qmj7x\") pod \"neutron-daa1-account-create-update-gftf9\" (UID: \"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9\") " pod="openstack/neutron-daa1-account-create-update-gftf9" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.536666 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-operator-scripts\") pod \"neutron-db-create-fqzhw\" (UID: \"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a\") " pod="openstack/neutron-db-create-fqzhw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.557670 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh4hk\" (UniqueName: \"kubernetes.io/projected/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-kube-api-access-hh4hk\") pod \"neutron-db-create-fqzhw\" (UID: \"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a\") " 
pod="openstack/neutron-db-create-fqzhw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.590868 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-lq5s5" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.637315 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-operator-scripts\") pod \"neutron-daa1-account-create-update-gftf9\" (UID: \"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9\") " pod="openstack/neutron-daa1-account-create-update-gftf9" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.637456 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmj7x\" (UniqueName: \"kubernetes.io/projected/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-kube-api-access-qmj7x\") pod \"neutron-daa1-account-create-update-gftf9\" (UID: \"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9\") " pod="openstack/neutron-daa1-account-create-update-gftf9" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.638556 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-operator-scripts\") pod \"neutron-daa1-account-create-update-gftf9\" (UID: \"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9\") " pod="openstack/neutron-daa1-account-create-update-gftf9" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.653089 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-fqzhw" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.658712 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmj7x\" (UniqueName: \"kubernetes.io/projected/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-kube-api-access-qmj7x\") pod \"neutron-daa1-account-create-update-gftf9\" (UID: \"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9\") " pod="openstack/neutron-daa1-account-create-update-gftf9" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.782409 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-daa1-account-create-update-gftf9" Jan 04 12:09:00 crc kubenswrapper[5003]: I0104 12:09:00.871812 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-89hnr"] Jan 04 12:09:00 crc kubenswrapper[5003]: W0104 12:09:00.882416 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03157841_c4cb_4d87_b168_ccbe7b52526d.slice/crio-2fa6a4b83588d7d26305992dd740cc2fd8863c34500b98c62a9cd986e01abe74 WatchSource:0}: Error finding container 2fa6a4b83588d7d26305992dd740cc2fd8863c34500b98c62a9cd986e01abe74: Status 404 returned error can't find the container with id 2fa6a4b83588d7d26305992dd740cc2fd8863c34500b98c62a9cd986e01abe74 Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:01.048692 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-6hmnw"] Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:01.056184 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-9b3b-account-create-update-vh6fj"] Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:01.064789 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-89hnr" event={"ID":"03157841-c4cb-4d87-b168-ccbe7b52526d","Type":"ContainerStarted","Data":"2fa6a4b83588d7d26305992dd740cc2fd8863c34500b98c62a9cd986e01abe74"} Jan 04 12:09:02 crc kubenswrapper[5003]: W0104 12:09:01.067479 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d2524c_4772_4eb6_b108_0513fce70ad8.slice/crio-1e67620f4c61ca7aa65a88f0a18d490a091db6475ec2f8c6bb6c265ee4ccf7ef WatchSource:0}: Error finding container 1e67620f4c61ca7aa65a88f0a18d490a091db6475ec2f8c6bb6c265ee4ccf7ef: Status 404 returned error can't find the container with id 1e67620f4c61ca7aa65a88f0a18d490a091db6475ec2f8c6bb6c265ee4ccf7ef Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:01.069545 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"c02d818e96b369de068228bec08edd738f084527048823b6a9f1dc73d5473513"} Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:01.069578 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"f2aa7e67c73edf19a995e58eaf9b8785bfd532521dc609235e9c65097cf71384"} Jan 04 12:09:02 crc kubenswrapper[5003]: W0104 12:09:01.076204 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod190b8ae7_1cb7_46c7_ad59_9dd4f1a3b756.slice/crio-80ee50f48e4d6a5e69c79192df403fb8d70327b294a9e616668352df66c37551 WatchSource:0}: Error finding container 80ee50f48e4d6a5e69c79192df403fb8d70327b294a9e616668352df66c37551: Status 404 returned error can't find the container with id 80ee50f48e4d6a5e69c79192df403fb8d70327b294a9e616668352df66c37551 Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:01.163872 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-553f-account-create-update-mhxbb"] Jan 04 12:09:02 crc kubenswrapper[5003]: W0104 12:09:01.174091 5003 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod471449a2_d6cc_477b_bad9_a616a54f4502.slice/crio-93d4b99aa8768f6ba0e9b4218bacecb5cf2f1b1fb4a42465d8c1a3f16de353d0 WatchSource:0}: Error finding container 93d4b99aa8768f6ba0e9b4218bacecb5cf2f1b1fb4a42465d8c1a3f16de353d0: Status 404 returned error can't find the container with id 93d4b99aa8768f6ba0e9b4218bacecb5cf2f1b1fb4a42465d8c1a3f16de353d0
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.090892 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-553f-account-create-update-mhxbb" event={"ID":"471449a2-d6cc-477b-bad9-a616a54f4502","Type":"ContainerStarted","Data":"c823c59a3d914aa9851a47e8f6dff4a4dbfb9007cd526ae9c0b90ac1c3009130"}
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.091365 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-553f-account-create-update-mhxbb" event={"ID":"471449a2-d6cc-477b-bad9-a616a54f4502","Type":"ContainerStarted","Data":"93d4b99aa8768f6ba0e9b4218bacecb5cf2f1b1fb4a42465d8c1a3f16de353d0"}
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.093911 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-89hnr" event={"ID":"03157841-c4cb-4d87-b168-ccbe7b52526d","Type":"ContainerStarted","Data":"74f56090caf846ec9a4ae90e2fdb0c91222039cc17a504b6b4f074cee442f187"}
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.101493 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6hmnw" event={"ID":"96d2524c-4772-4eb6-b108-0513fce70ad8","Type":"ContainerStarted","Data":"3f15ce4ee52e76ea9afea8573da314348cf1483913042535c37b49fe9a30a4de"}
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.101532 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6hmnw" event={"ID":"96d2524c-4772-4eb6-b108-0513fce70ad8","Type":"ContainerStarted","Data":"1e67620f4c61ca7aa65a88f0a18d490a091db6475ec2f8c6bb6c265ee4ccf7ef"}
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.104371 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9b3b-account-create-update-vh6fj" event={"ID":"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756","Type":"ContainerStarted","Data":"7ea838e99edb97328d68fecabf90f7f29f977b43e387a289ba0a3d36b7ff84a8"}
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.104421 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9b3b-account-create-update-vh6fj" event={"ID":"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756","Type":"ContainerStarted","Data":"80ee50f48e4d6a5e69c79192df403fb8d70327b294a9e616668352df66c37551"}
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.111555 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-553f-account-create-update-mhxbb" podStartSLOduration=2.111524279 podStartE2EDuration="2.111524279s" podCreationTimestamp="2026-01-04 12:09:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:02.10808965 +0000 UTC m=+1257.581119491" watchObservedRunningTime="2026-01-04 12:09:02.111524279 +0000 UTC m=+1257.584554140"
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.116212 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"c7da6298aedb8336c663f0f72ed02aee2693e470451ea9ed4c2506018c7c3b8b"}
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.139491 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-89hnr" podStartSLOduration=3.13946509 podStartE2EDuration="3.13946509s" podCreationTimestamp="2026-01-04 12:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:02.129112189 +0000 UTC m=+1257.602142040" watchObservedRunningTime="2026-01-04 12:09:02.13946509 +0000 UTC m=+1257.612494941"
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.153092 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-9b3b-account-create-update-vh6fj" podStartSLOduration=3.153071335 podStartE2EDuration="3.153071335s" podCreationTimestamp="2026-01-04 12:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:02.150339744 +0000 UTC m=+1257.623369585" watchObservedRunningTime="2026-01-04 12:09:02.153071335 +0000 UTC m=+1257.626101176"
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.176190 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-6hmnw" podStartSLOduration=3.176160009 podStartE2EDuration="3.176160009s" podCreationTimestamp="2026-01-04 12:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:02.17352748 +0000 UTC m=+1257.646557331" watchObservedRunningTime="2026-01-04 12:09:02.176160009 +0000 UTC m=+1257.649189870"
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.378214 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-fqzhw"]
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.388182 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-lq5s5"]
Jan 04 12:09:02 crc kubenswrapper[5003]: I0104 12:09:02.549128 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-daa1-account-create-update-gftf9"]
Jan 04 12:09:02 crc kubenswrapper[5003]: W0104 12:09:02.646335 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e9febc9_ba14_4fdd_be87_7bc2cdaa60c9.slice/crio-d4f0b050a0a993af2b41fe8e247d2bb7d7ac31f90e4d29776b5f604152f58ca7 WatchSource:0}: Error finding container d4f0b050a0a993af2b41fe8e247d2bb7d7ac31f90e4d29776b5f604152f58ca7: Status 404 returned error can't find the container with id d4f0b050a0a993af2b41fe8e247d2bb7d7ac31f90e4d29776b5f604152f58ca7
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.124822 5003 generic.go:334] "Generic (PLEG): container finished" podID="471449a2-d6cc-477b-bad9-a616a54f4502" containerID="c823c59a3d914aa9851a47e8f6dff4a4dbfb9007cd526ae9c0b90ac1c3009130" exitCode=0
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.124920 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-553f-account-create-update-mhxbb" event={"ID":"471449a2-d6cc-477b-bad9-a616a54f4502","Type":"ContainerDied","Data":"c823c59a3d914aa9851a47e8f6dff4a4dbfb9007cd526ae9c0b90ac1c3009130"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.134895 5003 generic.go:334] "Generic (PLEG): container finished" podID="03157841-c4cb-4d87-b168-ccbe7b52526d" containerID="74f56090caf846ec9a4ae90e2fdb0c91222039cc17a504b6b4f074cee442f187" exitCode=0
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.134966 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-89hnr" event={"ID":"03157841-c4cb-4d87-b168-ccbe7b52526d","Type":"ContainerDied","Data":"74f56090caf846ec9a4ae90e2fdb0c91222039cc17a504b6b4f074cee442f187"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.136814 5003 generic.go:334] "Generic (PLEG): container finished" podID="96d2524c-4772-4eb6-b108-0513fce70ad8" containerID="3f15ce4ee52e76ea9afea8573da314348cf1483913042535c37b49fe9a30a4de" exitCode=0
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.136878 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6hmnw" event={"ID":"96d2524c-4772-4eb6-b108-0513fce70ad8","Type":"ContainerDied","Data":"3f15ce4ee52e76ea9afea8573da314348cf1483913042535c37b49fe9a30a4de"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.141463 5003 generic.go:334] "Generic (PLEG): container finished" podID="190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756" containerID="7ea838e99edb97328d68fecabf90f7f29f977b43e387a289ba0a3d36b7ff84a8" exitCode=0
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.141506 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9b3b-account-create-update-vh6fj" event={"ID":"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756","Type":"ContainerDied","Data":"7ea838e99edb97328d68fecabf90f7f29f977b43e387a289ba0a3d36b7ff84a8"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.151755 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"1ff1b1d3338eb5bfcb21f80e2c4c2e6ad020fb1d499df31a3c24c44755e46e60"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.151824 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"0f945f657e49547a265f6f68bbeab5475213ad26ac43900d9363d069d96d532d"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.151842 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"ed59910d0aea135dbd4ee19aeda59d078e4feebb5abcaf39f16948cd769ad0c6"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.151856 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerStarted","Data":"10f362bf62129cfc68ea5030dc9a630a990d022be1874d460b3a1e2b97c3806b"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.154177 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lq5s5" event={"ID":"226781be-ff54-487b-9180-abaf7d0eda00","Type":"ContainerStarted","Data":"313a2314346602ea7d036af041ca809f2d7203aea23ca4fa740b0807f9021215"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.155633 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-daa1-account-create-update-gftf9" event={"ID":"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9","Type":"ContainerStarted","Data":"d181d379a15a1197fb320c3fc361a12c4273449906a8f258ef951dd0bbad98a7"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.155675 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-daa1-account-create-update-gftf9" event={"ID":"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9","Type":"ContainerStarted","Data":"d4f0b050a0a993af2b41fe8e247d2bb7d7ac31f90e4d29776b5f604152f58ca7"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.159057 5003 generic.go:334] "Generic (PLEG): container finished" podID="c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a" containerID="5865ae8b8b354c2f62178946a54a6353e637106245eee816faa47c3f85562bc5" exitCode=0
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.159108 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-fqzhw" event={"ID":"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a","Type":"ContainerDied","Data":"5865ae8b8b354c2f62178946a54a6353e637106245eee816faa47c3f85562bc5"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.159132 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-fqzhw" event={"ID":"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a","Type":"ContainerStarted","Data":"6605f0138de95c6a4bd2d4ece296c58a24f23a6bf0dff063c595ec0e27cffc2f"}
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.227858 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-daa1-account-create-update-gftf9" podStartSLOduration=3.227838765 podStartE2EDuration="3.227838765s" podCreationTimestamp="2026-01-04 12:09:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:03.223098831 +0000 UTC m=+1258.696128672" watchObservedRunningTime="2026-01-04 12:09:03.227838765 +0000 UTC m=+1258.700868616"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.292630 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=21.754821712000002 podStartE2EDuration="39.292586577s" podCreationTimestamp="2026-01-04 12:08:24 +0000 UTC" firstStartedPulling="2026-01-04 12:08:42.439372512 +0000 UTC m=+1237.912402353" lastFinishedPulling="2026-01-04 12:08:59.977137377 +0000 UTC m=+1255.450167218" observedRunningTime="2026-01-04 12:09:03.26591361 +0000 UTC m=+1258.738943461" watchObservedRunningTime="2026-01-04 12:09:03.292586577 +0000 UTC m=+1258.765616418"
Jan 04 12:09:03 crc kubenswrapper[5003]: E0104 12:09:03.511440 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e9febc9_ba14_4fdd_be87_7bc2cdaa60c9.slice/crio-conmon-d181d379a15a1197fb320c3fc361a12c4273449906a8f258ef951dd0bbad98a7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e9febc9_ba14_4fdd_be87_7bc2cdaa60c9.slice/crio-d181d379a15a1197fb320c3fc361a12c4273449906a8f258ef951dd0bbad98a7.scope\": RecentStats: unable to find data in memory cache]"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.560291 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8db84466c-8hs8q"]
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.564189 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.567336 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.609192 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-8hs8q"]
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.700492 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-svc\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.701180 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-config\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.701374 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-sb\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.701493 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-swift-storage-0\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.701590 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xld9x\" (UniqueName: \"kubernetes.io/projected/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-kube-api-access-xld9x\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.701700 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-nb\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.803585 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-swift-storage-0\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.803660 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xld9x\" (UniqueName: \"kubernetes.io/projected/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-kube-api-access-xld9x\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.803685 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-nb\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.803732 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-svc\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.803771 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-config\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.803849 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-sb\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.805057 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-sb\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.805168 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-nb\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.805534 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-swift-storage-0\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.806148 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-config\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.808586 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-svc\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.826633 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xld9x\" (UniqueName: \"kubernetes.io/projected/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-kube-api-access-xld9x\") pod \"dnsmasq-dns-8db84466c-8hs8q\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:03 crc kubenswrapper[5003]: I0104 12:09:03.882696 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.179974 5003 generic.go:334] "Generic (PLEG): container finished" podID="7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9" containerID="d181d379a15a1197fb320c3fc361a12c4273449906a8f258ef951dd0bbad98a7" exitCode=0
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.180131 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-daa1-account-create-update-gftf9" event={"ID":"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9","Type":"ContainerDied","Data":"d181d379a15a1197fb320c3fc361a12c4273449906a8f258ef951dd0bbad98a7"}
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.386768 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-8hs8q"]
Jan 04 12:09:04 crc kubenswrapper[5003]: W0104 12:09:04.402715 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf88e16a0_fb70_4ea8_89c1_0a3397e246e0.slice/crio-364c23a8ae77def1e183d40fb84f2e56f6ef42933891fe3909da2289aaf5c53e WatchSource:0}: Error finding container 364c23a8ae77def1e183d40fb84f2e56f6ef42933891fe3909da2289aaf5c53e: Status 404 returned error can't find the container with id 364c23a8ae77def1e183d40fb84f2e56f6ef42933891fe3909da2289aaf5c53e
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.667875 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6hmnw"
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.669045 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-fqzhw"
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.733448 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-9b3b-account-create-update-vh6fj"
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.741893 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-553f-account-create-update-mhxbb"
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.758294 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-89hnr"
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.822735 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8b5x\" (UniqueName: \"kubernetes.io/projected/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-kube-api-access-r8b5x\") pod \"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756\" (UID: \"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756\") "
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.822863 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-operator-scripts\") pod \"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756\" (UID: \"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756\") "
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.822891 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hh4hk\" (UniqueName: \"kubernetes.io/projected/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-kube-api-access-hh4hk\") pod \"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a\" (UID: \"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a\") "
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.822934 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96d2524c-4772-4eb6-b108-0513fce70ad8-operator-scripts\") pod \"96d2524c-4772-4eb6-b108-0513fce70ad8\" (UID: \"96d2524c-4772-4eb6-b108-0513fce70ad8\") "
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.822956 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-operator-scripts\") pod \"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a\" (UID: \"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a\") "
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.822993 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8z5v\" (UniqueName: \"kubernetes.io/projected/96d2524c-4772-4eb6-b108-0513fce70ad8-kube-api-access-f8z5v\") pod \"96d2524c-4772-4eb6-b108-0513fce70ad8\" (UID: \"96d2524c-4772-4eb6-b108-0513fce70ad8\") "
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.828119 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96d2524c-4772-4eb6-b108-0513fce70ad8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "96d2524c-4772-4eb6-b108-0513fce70ad8" (UID: "96d2524c-4772-4eb6-b108-0513fce70ad8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.828343 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756" (UID: "190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.833181 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a" (UID: "c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.835976 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-kube-api-access-r8b5x" (OuterVolumeSpecName: "kube-api-access-r8b5x") pod "190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756" (UID: "190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756"). InnerVolumeSpecName "kube-api-access-r8b5x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.836452 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96d2524c-4772-4eb6-b108-0513fce70ad8-kube-api-access-f8z5v" (OuterVolumeSpecName: "kube-api-access-f8z5v") pod "96d2524c-4772-4eb6-b108-0513fce70ad8" (UID: "96d2524c-4772-4eb6-b108-0513fce70ad8"). InnerVolumeSpecName "kube-api-access-f8z5v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.847123 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-kube-api-access-hh4hk" (OuterVolumeSpecName: "kube-api-access-hh4hk") pod "c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a" (UID: "c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a"). InnerVolumeSpecName "kube-api-access-hh4hk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.923945 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc7w6\" (UniqueName: \"kubernetes.io/projected/471449a2-d6cc-477b-bad9-a616a54f4502-kube-api-access-vc7w6\") pod \"471449a2-d6cc-477b-bad9-a616a54f4502\" (UID: \"471449a2-d6cc-477b-bad9-a616a54f4502\") "
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.924074 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzjcn\" (UniqueName: \"kubernetes.io/projected/03157841-c4cb-4d87-b168-ccbe7b52526d-kube-api-access-qzjcn\") pod \"03157841-c4cb-4d87-b168-ccbe7b52526d\" (UID: \"03157841-c4cb-4d87-b168-ccbe7b52526d\") "
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.924166 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/471449a2-d6cc-477b-bad9-a616a54f4502-operator-scripts\") pod \"471449a2-d6cc-477b-bad9-a616a54f4502\" (UID: \"471449a2-d6cc-477b-bad9-a616a54f4502\") "
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.924201 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03157841-c4cb-4d87-b168-ccbe7b52526d-operator-scripts\") pod \"03157841-c4cb-4d87-b168-ccbe7b52526d\" (UID: \"03157841-c4cb-4d87-b168-ccbe7b52526d\") "
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.924627 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/471449a2-d6cc-477b-bad9-a616a54f4502-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "471449a2-d6cc-477b-bad9-a616a54f4502" (UID: "471449a2-d6cc-477b-bad9-a616a54f4502"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.924747 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03157841-c4cb-4d87-b168-ccbe7b52526d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "03157841-c4cb-4d87-b168-ccbe7b52526d" (UID: "03157841-c4cb-4d87-b168-ccbe7b52526d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.925795 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96d2524c-4772-4eb6-b108-0513fce70ad8-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.925815 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.925824 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/471449a2-d6cc-477b-bad9-a616a54f4502-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.925832 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03157841-c4cb-4d87-b168-ccbe7b52526d-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.925843 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8z5v\" (UniqueName: \"kubernetes.io/projected/96d2524c-4772-4eb6-b108-0513fce70ad8-kube-api-access-f8z5v\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.925854 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8b5x\" (UniqueName: \"kubernetes.io/projected/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-kube-api-access-r8b5x\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.925864 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.925873 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hh4hk\" (UniqueName: \"kubernetes.io/projected/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a-kube-api-access-hh4hk\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.927756 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03157841-c4cb-4d87-b168-ccbe7b52526d-kube-api-access-qzjcn" (OuterVolumeSpecName: "kube-api-access-qzjcn") pod "03157841-c4cb-4d87-b168-ccbe7b52526d" (UID: "03157841-c4cb-4d87-b168-ccbe7b52526d"). InnerVolumeSpecName "kube-api-access-qzjcn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:04 crc kubenswrapper[5003]: I0104 12:09:04.928249 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/471449a2-d6cc-477b-bad9-a616a54f4502-kube-api-access-vc7w6" (OuterVolumeSpecName: "kube-api-access-vc7w6") pod "471449a2-d6cc-477b-bad9-a616a54f4502" (UID: "471449a2-d6cc-477b-bad9-a616a54f4502"). InnerVolumeSpecName "kube-api-access-vc7w6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.027576 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc7w6\" (UniqueName: \"kubernetes.io/projected/471449a2-d6cc-477b-bad9-a616a54f4502-kube-api-access-vc7w6\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.027630 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzjcn\" (UniqueName: \"kubernetes.io/projected/03157841-c4cb-4d87-b168-ccbe7b52526d-kube-api-access-qzjcn\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.216333 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-fqzhw" event={"ID":"c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a","Type":"ContainerDied","Data":"6605f0138de95c6a4bd2d4ece296c58a24f23a6bf0dff063c595ec0e27cffc2f"}
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.216402 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6605f0138de95c6a4bd2d4ece296c58a24f23a6bf0dff063c595ec0e27cffc2f"
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.216539 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-fqzhw"
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.223456 5003 generic.go:334] "Generic (PLEG): container finished" podID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" containerID="fc96ecd6b4da1b59dea246c08c2535bce123d49b5fff66d672320cccfc32ed03" exitCode=0
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.223524 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" event={"ID":"f88e16a0-fb70-4ea8-89c1-0a3397e246e0","Type":"ContainerDied","Data":"fc96ecd6b4da1b59dea246c08c2535bce123d49b5fff66d672320cccfc32ed03"}
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.223584 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" event={"ID":"f88e16a0-fb70-4ea8-89c1-0a3397e246e0","Type":"ContainerStarted","Data":"364c23a8ae77def1e183d40fb84f2e56f6ef42933891fe3909da2289aaf5c53e"}
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.227878 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-553f-account-create-update-mhxbb"
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.228002 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-553f-account-create-update-mhxbb" event={"ID":"471449a2-d6cc-477b-bad9-a616a54f4502","Type":"ContainerDied","Data":"93d4b99aa8768f6ba0e9b4218bacecb5cf2f1b1fb4a42465d8c1a3f16de353d0"}
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.228047 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93d4b99aa8768f6ba0e9b4218bacecb5cf2f1b1fb4a42465d8c1a3f16de353d0"
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.239278 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-89hnr" event={"ID":"03157841-c4cb-4d87-b168-ccbe7b52526d","Type":"ContainerDied","Data":"2fa6a4b83588d7d26305992dd740cc2fd8863c34500b98c62a9cd986e01abe74"}
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.239323 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fa6a4b83588d7d26305992dd740cc2fd8863c34500b98c62a9cd986e01abe74"
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.239407 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-89hnr"
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.249962 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-6hmnw"
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.250118 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-6hmnw" event={"ID":"96d2524c-4772-4eb6-b108-0513fce70ad8","Type":"ContainerDied","Data":"1e67620f4c61ca7aa65a88f0a18d490a091db6475ec2f8c6bb6c265ee4ccf7ef"}
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.250402 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e67620f4c61ca7aa65a88f0a18d490a091db6475ec2f8c6bb6c265ee4ccf7ef"
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.253654 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9b3b-account-create-update-vh6fj" event={"ID":"190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756","Type":"ContainerDied","Data":"80ee50f48e4d6a5e69c79192df403fb8d70327b294a9e616668352df66c37551"}
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.253718 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80ee50f48e4d6a5e69c79192df403fb8d70327b294a9e616668352df66c37551"
Jan 04 12:09:05 crc kubenswrapper[5003]: I0104 12:09:05.253762 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-9b3b-account-create-update-vh6fj"
Jan 04 12:09:08 crc kubenswrapper[5003]: I0104 12:09:08.866985 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-daa1-account-create-update-gftf9"
Jan 04 12:09:08 crc kubenswrapper[5003]: I0104 12:09:08.919733 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-operator-scripts\") pod \"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9\" (UID: \"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9\") "
Jan 04 12:09:08 crc kubenswrapper[5003]: I0104 12:09:08.920083 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmj7x\" (UniqueName: \"kubernetes.io/projected/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-kube-api-access-qmj7x\") pod \"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9\" (UID: \"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9\") "
Jan 04 12:09:08 crc kubenswrapper[5003]: I0104 12:09:08.929798 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9" (UID: "7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:08 crc kubenswrapper[5003]: I0104 12:09:08.930559 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-kube-api-access-qmj7x" (OuterVolumeSpecName: "kube-api-access-qmj7x") pod "7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9" (UID: "7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9"). InnerVolumeSpecName "kube-api-access-qmj7x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:09 crc kubenswrapper[5003]: I0104 12:09:09.024150 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:09 crc kubenswrapper[5003]: I0104 12:09:09.024183 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmj7x\" (UniqueName: \"kubernetes.io/projected/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9-kube-api-access-qmj7x\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:09 crc kubenswrapper[5003]: I0104 12:09:09.291382 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" event={"ID":"f88e16a0-fb70-4ea8-89c1-0a3397e246e0","Type":"ContainerStarted","Data":"26c615be30307321a60553ff868cf3f2110b0cf2b3568f29830c1b3b02f8ddc0"}
Jan 04 12:09:09 crc kubenswrapper[5003]: I0104 12:09:09.291508 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:09 crc kubenswrapper[5003]: I0104 12:09:09.293602 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lq5s5" event={"ID":"226781be-ff54-487b-9180-abaf7d0eda00","Type":"ContainerStarted","Data":"17b7b9bcee83dae61356b58e517a5e5f714e66ccb04fc94b80e784c8833fac4c"}
Jan 04 12:09:09 crc kubenswrapper[5003]: I0104 12:09:09.296886 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-daa1-account-create-update-gftf9" event={"ID":"7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9","Type":"ContainerDied","Data":"d4f0b050a0a993af2b41fe8e247d2bb7d7ac31f90e4d29776b5f604152f58ca7"}
Jan 04 12:09:09 crc kubenswrapper[5003]: I0104 12:09:09.296934 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4f0b050a0a993af2b41fe8e247d2bb7d7ac31f90e4d29776b5f604152f58ca7"
Jan 04 12:09:09 crc kubenswrapper[5003]: I0104 12:09:09.296997 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-daa1-account-create-update-gftf9"
Jan 04 12:09:09 crc kubenswrapper[5003]: I0104 12:09:09.318553 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" podStartSLOduration=6.318532117 podStartE2EDuration="6.318532117s" podCreationTimestamp="2026-01-04 12:09:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:09.310684932 +0000 UTC m=+1264.783714783" watchObservedRunningTime="2026-01-04 12:09:09.318532117 +0000 UTC m=+1264.791561948"
Jan 04 12:09:09 crc kubenswrapper[5003]: I0104 12:09:09.830961 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-lq5s5" podStartSLOduration=3.537884618 podStartE2EDuration="9.830939889s" podCreationTimestamp="2026-01-04 12:09:00 +0000 UTC" firstStartedPulling="2026-01-04 12:09:02.424445608 +0000 UTC m=+1257.897475449" lastFinishedPulling="2026-01-04 12:09:08.717500869 +0000 UTC m=+1264.190530720" observedRunningTime="2026-01-04 12:09:09.331300401 +0000 UTC m=+1264.804330242" watchObservedRunningTime="2026-01-04 12:09:09.830939889 +0000 UTC m=+1265.303969740"
Jan 04 12:09:11 crc kubenswrapper[5003]: I0104 12:09:11.316908 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jvhdz" event={"ID":"95845d2e-e8cf-4d56-ad63-260115e0efc8","Type":"ContainerStarted","Data":"41ebcad121f9a4824ae310763b6db5ab85e6345eca024178476fc82f67538ebe"}
Jan 04 12:09:11 crc kubenswrapper[5003]: I0104 12:09:11.338566 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-jvhdz" podStartSLOduration=2.084301021 podStartE2EDuration="33.338541871s" podCreationTimestamp="2026-01-04 12:08:38 +0000 UTC" firstStartedPulling="2026-01-04 12:08:39.011265697 +0000 UTC m=+1234.484295538" lastFinishedPulling="2026-01-04 12:09:10.265506547 +0000 UTC m=+1265.738536388" observedRunningTime="2026-01-04 12:09:11.333115569 +0000 UTC m=+1266.806145420" watchObservedRunningTime="2026-01-04 12:09:11.338541871 +0000 UTC m=+1266.811571742"
Jan 04 12:09:12 crc kubenswrapper[5003]: I0104 12:09:12.325325 5003 generic.go:334] "Generic (PLEG): container finished" podID="226781be-ff54-487b-9180-abaf7d0eda00" containerID="17b7b9bcee83dae61356b58e517a5e5f714e66ccb04fc94b80e784c8833fac4c" exitCode=0
Jan 04 12:09:12 crc kubenswrapper[5003]: I0104 12:09:12.325396 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lq5s5" event={"ID":"226781be-ff54-487b-9180-abaf7d0eda00","Type":"ContainerDied","Data":"17b7b9bcee83dae61356b58e517a5e5f714e66ccb04fc94b80e784c8833fac4c"}
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.727090 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-lq5s5"
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.813561 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-config-data\") pod \"226781be-ff54-487b-9180-abaf7d0eda00\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") "
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.814524 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-combined-ca-bundle\") pod \"226781be-ff54-487b-9180-abaf7d0eda00\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") "
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.814698 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74drg\" (UniqueName: \"kubernetes.io/projected/226781be-ff54-487b-9180-abaf7d0eda00-kube-api-access-74drg\") pod \"226781be-ff54-487b-9180-abaf7d0eda00\" (UID: \"226781be-ff54-487b-9180-abaf7d0eda00\") "
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.819993 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/226781be-ff54-487b-9180-abaf7d0eda00-kube-api-access-74drg" (OuterVolumeSpecName: "kube-api-access-74drg") pod "226781be-ff54-487b-9180-abaf7d0eda00" (UID: "226781be-ff54-487b-9180-abaf7d0eda00"). InnerVolumeSpecName "kube-api-access-74drg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:13 crc kubenswrapper[5003]: E0104 12:09:13.825969 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc76d3acd_e992_42b8_8dcc_0f5f9ddbd02a.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d2524c_4772_4eb6_b108_0513fce70ad8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d2524c_4772_4eb6_b108_0513fce70ad8.slice/crio-1e67620f4c61ca7aa65a88f0a18d490a091db6475ec2f8c6bb6c265ee4ccf7ef\": RecentStats: unable to find data in memory cache]"
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.840675 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "226781be-ff54-487b-9180-abaf7d0eda00" (UID: "226781be-ff54-487b-9180-abaf7d0eda00"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.857397 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-config-data" (OuterVolumeSpecName: "config-data") pod "226781be-ff54-487b-9180-abaf7d0eda00" (UID: "226781be-ff54-487b-9180-abaf7d0eda00"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.885332 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8db84466c-8hs8q"
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.916907 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.916952 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74drg\" (UniqueName: \"kubernetes.io/projected/226781be-ff54-487b-9180-abaf7d0eda00-kube-api-access-74drg\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.916968 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/226781be-ff54-487b-9180-abaf7d0eda00-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.945253 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-npwcd"]
Jan 04 12:09:13 crc kubenswrapper[5003]: I0104 12:09:13.945518 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" podUID="33a31de1-636d-416a-a0a3-1f5cb2de8ce1" containerName="dnsmasq-dns" containerID="cri-o://063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8" gracePeriod=10
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.314780 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.344241 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-lq5s5" event={"ID":"226781be-ff54-487b-9180-abaf7d0eda00","Type":"ContainerDied","Data":"313a2314346602ea7d036af041ca809f2d7203aea23ca4fa740b0807f9021215"}
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.344288 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="313a2314346602ea7d036af041ca809f2d7203aea23ca4fa740b0807f9021215"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.344352 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-lq5s5"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.359857 5003 generic.go:334] "Generic (PLEG): container finished" podID="33a31de1-636d-416a-a0a3-1f5cb2de8ce1" containerID="063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8" exitCode=0
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.359942 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" event={"ID":"33a31de1-636d-416a-a0a3-1f5cb2de8ce1","Type":"ContainerDied","Data":"063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8"}
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.359985 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd" event={"ID":"33a31de1-636d-416a-a0a3-1f5cb2de8ce1","Type":"ContainerDied","Data":"6dfe568c31772bd3f0a29cae3451cab2c92d078303f46605e9dcb8295f888360"}
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.360025 5003 scope.go:117] "RemoveContainer" containerID="063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.360253 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67fdf7998c-npwcd"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.399836 5003 scope.go:117] "RemoveContainer" containerID="c8b19130b9f9dcd1a1e6ba1282dff9ff15372e0d198b03af0cf08830634c7998"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.427931 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-sb\") pod \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") "
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.428169 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-dns-svc\") pod \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") "
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.428245 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl25f\" (UniqueName: \"kubernetes.io/projected/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-kube-api-access-dl25f\") pod \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") "
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.428261 5003 scope.go:117] "RemoveContainer" containerID="063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.428319 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-config\") pod \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") "
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.428687 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-nb\") pod \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\" (UID: \"33a31de1-636d-416a-a0a3-1f5cb2de8ce1\") "
Jan 04 12:09:14 crc kubenswrapper[5003]: E0104 12:09:14.429063 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8\": container with ID starting with 063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8 not found: ID does not exist" containerID="063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.429122 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8"} err="failed to get container status \"063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8\": rpc error: code = NotFound desc = could not find container \"063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8\": container with ID starting with 063905b1821057f05e527f5e64b236cf14da463fb596547b824d8ec3c45b2ac8 not found: ID does not exist"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.429147 5003 scope.go:117] "RemoveContainer" containerID="c8b19130b9f9dcd1a1e6ba1282dff9ff15372e0d198b03af0cf08830634c7998"
Jan 04 12:09:14 crc kubenswrapper[5003]: E0104 12:09:14.429439 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8b19130b9f9dcd1a1e6ba1282dff9ff15372e0d198b03af0cf08830634c7998\": container with ID starting with c8b19130b9f9dcd1a1e6ba1282dff9ff15372e0d198b03af0cf08830634c7998 not found: ID does not exist" containerID="c8b19130b9f9dcd1a1e6ba1282dff9ff15372e0d198b03af0cf08830634c7998"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.429468 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8b19130b9f9dcd1a1e6ba1282dff9ff15372e0d198b03af0cf08830634c7998"} err="failed to get container status \"c8b19130b9f9dcd1a1e6ba1282dff9ff15372e0d198b03af0cf08830634c7998\": rpc error: code = NotFound desc = could not find container \"c8b19130b9f9dcd1a1e6ba1282dff9ff15372e0d198b03af0cf08830634c7998\": container with ID starting with c8b19130b9f9dcd1a1e6ba1282dff9ff15372e0d198b03af0cf08830634c7998 not found: ID does not exist"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.434702 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-kube-api-access-dl25f" (OuterVolumeSpecName: "kube-api-access-dl25f") pod "33a31de1-636d-416a-a0a3-1f5cb2de8ce1" (UID: "33a31de1-636d-416a-a0a3-1f5cb2de8ce1"). InnerVolumeSpecName "kube-api-access-dl25f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.473109 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "33a31de1-636d-416a-a0a3-1f5cb2de8ce1" (UID: "33a31de1-636d-416a-a0a3-1f5cb2de8ce1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.473433 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "33a31de1-636d-416a-a0a3-1f5cb2de8ce1" (UID: "33a31de1-636d-416a-a0a3-1f5cb2de8ce1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.475938 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-config" (OuterVolumeSpecName: "config") pod "33a31de1-636d-416a-a0a3-1f5cb2de8ce1" (UID: "33a31de1-636d-416a-a0a3-1f5cb2de8ce1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.477321 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "33a31de1-636d-416a-a0a3-1f5cb2de8ce1" (UID: "33a31de1-636d-416a-a0a3-1f5cb2de8ce1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.531441 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.531473 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.531483 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.531493 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl25f\" (UniqueName: \"kubernetes.io/projected/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-kube-api-access-dl25f\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.531504 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33a31de1-636d-416a-a0a3-1f5cb2de8ce1-config\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.604996 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-767d96458c-sz57m"]
Jan 04 12:09:14 crc kubenswrapper[5003]: E0104 12:09:14.605459 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9" containerName="mariadb-account-create-update"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.605479 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9" containerName="mariadb-account-create-update"
Jan 04 12:09:14 crc kubenswrapper[5003]: E0104 12:09:14.605508 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a" containerName="mariadb-database-create"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.605519 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a" containerName="mariadb-database-create"
Jan 04 12:09:14 crc kubenswrapper[5003]: E0104 12:09:14.605533 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33a31de1-636d-416a-a0a3-1f5cb2de8ce1" containerName="dnsmasq-dns"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.605542 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="33a31de1-636d-416a-a0a3-1f5cb2de8ce1" containerName="dnsmasq-dns"
Jan 04 12:09:14 crc kubenswrapper[5003]: E0104 12:09:14.610518 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="226781be-ff54-487b-9180-abaf7d0eda00" containerName="keystone-db-sync"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.610556 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="226781be-ff54-487b-9180-abaf7d0eda00" containerName="keystone-db-sync"
Jan 04 12:09:14 crc kubenswrapper[5003]: E0104 12:09:14.610573 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96d2524c-4772-4eb6-b108-0513fce70ad8" containerName="mariadb-database-create"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.610585 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="96d2524c-4772-4eb6-b108-0513fce70ad8" containerName="mariadb-database-create"
Jan 04 12:09:14 crc kubenswrapper[5003]: E0104 12:09:14.610604 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03157841-c4cb-4d87-b168-ccbe7b52526d" containerName="mariadb-database-create"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.610612 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="03157841-c4cb-4d87-b168-ccbe7b52526d" containerName="mariadb-database-create"
Jan 04 12:09:14 crc kubenswrapper[5003]: E0104 12:09:14.610624 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756" containerName="mariadb-account-create-update"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.610632 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756" containerName="mariadb-account-create-update"
Jan 04 12:09:14 crc kubenswrapper[5003]: E0104 12:09:14.610660 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="471449a2-d6cc-477b-bad9-a616a54f4502" containerName="mariadb-account-create-update"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.610668 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="471449a2-d6cc-477b-bad9-a616a54f4502" containerName="mariadb-account-create-update"
Jan 04 12:09:14 crc kubenswrapper[5003]: E0104 12:09:14.610678 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33a31de1-636d-416a-a0a3-1f5cb2de8ce1" containerName="init"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.610686 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="33a31de1-636d-416a-a0a3-1f5cb2de8ce1" containerName="init"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.611032 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9" containerName="mariadb-account-create-update"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.611056 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="226781be-ff54-487b-9180-abaf7d0eda00" containerName="keystone-db-sync"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.611068 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="33a31de1-636d-416a-a0a3-1f5cb2de8ce1" containerName="dnsmasq-dns"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.611085 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="96d2524c-4772-4eb6-b108-0513fce70ad8" containerName="mariadb-database-create"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.611096 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a" containerName="mariadb-database-create"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.611115 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756" containerName="mariadb-account-create-update"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.611124 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="03157841-c4cb-4d87-b168-ccbe7b52526d" containerName="mariadb-database-create"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.611140 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="471449a2-d6cc-477b-bad9-a616a54f4502" containerName="mariadb-account-create-update"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.612386 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-sz57m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.625359 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-sz57m"]
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.632804 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-svc\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.641907 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-nb\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.641967 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-swift-storage-0\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.642133 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrq2s\" (UniqueName: \"kubernetes.io/projected/ed3610d7-f747-4c99-b42c-35f1b05e17b4-kube-api-access-lrq2s\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.642171 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-sb\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.642194 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-config\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.668670 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-wm86m"]
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.670250 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-wm86m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.673643 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.673967 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.674110 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.674236 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.674337 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-nsd8s"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.699171 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-wm86m"]
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.743655 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-config-data\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.745130 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-scripts\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.745259 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-svc\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.745333 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-combined-ca-bundle\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.745424 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-nb\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m"
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.745498 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-swift-storage-0\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " 
pod="openstack/dnsmasq-dns-767d96458c-sz57m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.745614 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn2dx\" (UniqueName: \"kubernetes.io/projected/eee6a60e-d425-4f40-9895-62497236658f-kube-api-access-mn2dx\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.745702 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-credential-keys\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.745771 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrq2s\" (UniqueName: \"kubernetes.io/projected/ed3610d7-f747-4c99-b42c-35f1b05e17b4-kube-api-access-lrq2s\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.745837 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-sb\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.745912 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-config\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.746052 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-fernet-keys\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.746592 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-svc\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.747047 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-nb\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.747176 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-swift-storage-0\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m" 
Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.747389 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-sb\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.748103 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-config\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.753792 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-npwcd"] Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.766037 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67fdf7998c-npwcd"] Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.784765 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrq2s\" (UniqueName: \"kubernetes.io/projected/ed3610d7-f747-4c99-b42c-35f1b05e17b4-kube-api-access-lrq2s\") pod \"dnsmasq-dns-767d96458c-sz57m\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") " pod="openstack/dnsmasq-dns-767d96458c-sz57m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.820381 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33a31de1-636d-416a-a0a3-1f5cb2de8ce1" path="/var/lib/kubelet/pods/33a31de1-636d-416a-a0a3-1f5cb2de8ce1/volumes" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.848469 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn2dx\" (UniqueName: \"kubernetes.io/projected/eee6a60e-d425-4f40-9895-62497236658f-kube-api-access-mn2dx\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.848554 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-credential-keys\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.848586 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-fernet-keys\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.848625 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-config-data\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.848666 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-scripts\") pod \"keystone-bootstrap-wm86m\" (UID: 
\"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.848696 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-combined-ca-bundle\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.852551 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-2zm85"] Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.853675 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.857281 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-credential-keys\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.857914 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.858006 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-fernet-keys\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.857914 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-5v97r" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.858101 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.859183 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-scripts\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.861258 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-config-data\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.879180 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-2zm85"] Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.881046 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-combined-ca-bundle\") pod \"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.881290 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn2dx\" (UniqueName: \"kubernetes.io/projected/eee6a60e-d425-4f40-9895-62497236658f-kube-api-access-mn2dx\") pod 
\"keystone-bootstrap-wm86m\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") " pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.909252 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.918545 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.918676 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.923518 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.923784 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.932858 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-sz57m" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950042 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950084 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b47fe1da-877a-4d5e-a21b-c7955bd00b30-etc-machine-id\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950103 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-scripts\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950125 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950145 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-run-httpd\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950164 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-db-sync-config-data\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950193 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-config-data\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950213 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trhwh\" (UniqueName: \"kubernetes.io/projected/b47fe1da-877a-4d5e-a21b-c7955bd00b30-kube-api-access-trhwh\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950245 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-scripts\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950266 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8mp2\" (UniqueName: \"kubernetes.io/projected/821beb7d-9467-4756-8f10-c178e1bcc89e-kube-api-access-f8mp2\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950284 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-combined-ca-bundle\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950303 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-log-httpd\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.950347 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-config-data\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.968956 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-4qtmm"] Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.981402 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-4qtmm" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.987720 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.988266 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 04 12:09:14 crc kubenswrapper[5003]: I0104 12:09:14.988621 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-zx24b" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.019715 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4qtmm"] Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.040967 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-wm86m" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.051219 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-qx4js"] Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.055370 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-qx4js" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.055387 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-config-data\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.055607 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trhwh\" (UniqueName: \"kubernetes.io/projected/b47fe1da-877a-4d5e-a21b-c7955bd00b30-kube-api-access-trhwh\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.055864 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-config\") pod \"neutron-db-sync-4qtmm\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") " pod="openstack/neutron-db-sync-4qtmm" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.056151 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-scripts\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.059213 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8mp2\" (UniqueName: \"kubernetes.io/projected/821beb7d-9467-4756-8f10-c178e1bcc89e-kube-api-access-f8mp2\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.059281 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-combined-ca-bundle\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.059337 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-log-httpd\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.059535 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-config-data\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.059674 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6twh9\" (UniqueName: \"kubernetes.io/projected/a6fea540-25d6-49c0-86f9-9476e70ceb93-kube-api-access-6twh9\") pod \"neutron-db-sync-4qtmm\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") " pod="openstack/neutron-db-sync-4qtmm" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.059825 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.059845 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-combined-ca-bundle\") pod \"neutron-db-sync-4qtmm\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") " pod="openstack/neutron-db-sync-4qtmm" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.059901 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b47fe1da-877a-4d5e-a21b-c7955bd00b30-etc-machine-id\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.059917 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-scripts\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.059971 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.060035 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-run-httpd\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.060055 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-db-sync-config-data\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " 
pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.063674 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-config-data\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.064856 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-8f5bx" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.065112 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.068457 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-log-httpd\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.068511 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b47fe1da-877a-4d5e-a21b-c7955bd00b30-etc-machine-id\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.073945 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-db-sync-config-data\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.074383 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-run-httpd\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.074617 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-combined-ca-bundle\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.076388 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-config-data\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.077534 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-qx4js"] Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.080754 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-scripts\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.084556 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.085212 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-scripts\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.086987 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trhwh\" (UniqueName: \"kubernetes.io/projected/b47fe1da-877a-4d5e-a21b-c7955bd00b30-kube-api-access-trhwh\") pod \"cinder-db-sync-2zm85\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") " pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.089704 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.095842 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-sz57m"] Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.119830 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8mp2\" (UniqueName: \"kubernetes.io/projected/821beb7d-9467-4756-8f10-c178e1bcc89e-kube-api-access-f8mp2\") pod \"ceilometer-0\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.162106 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-combined-ca-bundle\") pod \"neutron-db-sync-4qtmm\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") " pod="openstack/neutron-db-sync-4qtmm" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.162245 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-db-sync-config-data\") pod \"barbican-db-sync-qx4js\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " pod="openstack/barbican-db-sync-qx4js" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.162294 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-config\") pod \"neutron-db-sync-4qtmm\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") " pod="openstack/neutron-db-sync-4qtmm" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.162456 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-combined-ca-bundle\") pod \"barbican-db-sync-qx4js\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " pod="openstack/barbican-db-sync-qx4js" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.162493 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghd9k\" (UniqueName: 
\"kubernetes.io/projected/c2b0b250-33ee-45aa-baa8-1540d9f39b03-kube-api-access-ghd9k\") pod \"barbican-db-sync-qx4js\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " pod="openstack/barbican-db-sync-qx4js" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.162595 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6twh9\" (UniqueName: \"kubernetes.io/projected/a6fea540-25d6-49c0-86f9-9476e70ceb93-kube-api-access-6twh9\") pod \"neutron-db-sync-4qtmm\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") " pod="openstack/neutron-db-sync-4qtmm" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.171740 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-config\") pod \"neutron-db-sync-4qtmm\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") " pod="openstack/neutron-db-sync-4qtmm" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.173801 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-combined-ca-bundle\") pod \"neutron-db-sync-4qtmm\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") " pod="openstack/neutron-db-sync-4qtmm" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.205671 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6twh9\" (UniqueName: \"kubernetes.io/projected/a6fea540-25d6-49c0-86f9-9476e70ceb93-kube-api-access-6twh9\") pod \"neutron-db-sync-4qtmm\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") " pod="openstack/neutron-db-sync-4qtmm" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.221185 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-85q57"] Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.243754 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-85q57" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.254358 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.255540 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-5w4cq" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.255627 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.283582 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-combined-ca-bundle\") pod \"barbican-db-sync-qx4js\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " pod="openstack/barbican-db-sync-qx4js" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.283933 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghd9k\" (UniqueName: \"kubernetes.io/projected/c2b0b250-33ee-45aa-baa8-1540d9f39b03-kube-api-access-ghd9k\") pod \"barbican-db-sync-qx4js\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " pod="openstack/barbican-db-sync-qx4js" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.284333 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-db-sync-config-data\") pod \"barbican-db-sync-qx4js\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " pod="openstack/barbican-db-sync-qx4js" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.290702 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-db-sync-config-data\") pod \"barbican-db-sync-qx4js\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " pod="openstack/barbican-db-sync-qx4js" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.290794 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"] Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.293265 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.308950 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-combined-ca-bundle\") pod \"barbican-db-sync-qx4js\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " pod="openstack/barbican-db-sync-qx4js" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.316792 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-2zm85" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.317034 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-85q57"] Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.321650 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghd9k\" (UniqueName: \"kubernetes.io/projected/c2b0b250-33ee-45aa-baa8-1540d9f39b03-kube-api-access-ghd9k\") pod \"barbican-db-sync-qx4js\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " pod="openstack/barbican-db-sync-qx4js" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.337443 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.349301 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4qtmm" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.355419 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"] Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.386964 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-combined-ca-bundle\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.387078 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-config\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.387159 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-scripts\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.387222 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s57cp\" (UniqueName: \"kubernetes.io/projected/724a46c8-1096-4753-ba3e-e4128189c8ae-kube-api-access-s57cp\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.387290 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jccbg\" (UniqueName: \"kubernetes.io/projected/48ca11e7-47b1-4362-aab7-6afd4617a783-kube-api-access-jccbg\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.387310 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/724a46c8-1096-4753-ba3e-e4128189c8ae-logs\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 
12:09:15.387329 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-sb\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.387349 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-nb\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.387371 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-svc\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.387391 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-swift-storage-0\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.387416 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-config-data\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.387828 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-qx4js" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.488599 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-config\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.489142 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-scripts\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.489253 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s57cp\" (UniqueName: \"kubernetes.io/projected/724a46c8-1096-4753-ba3e-e4128189c8ae-kube-api-access-s57cp\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.489333 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jccbg\" (UniqueName: \"kubernetes.io/projected/48ca11e7-47b1-4362-aab7-6afd4617a783-kube-api-access-jccbg\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.489399 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/724a46c8-1096-4753-ba3e-e4128189c8ae-logs\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.489457 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-sb\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.489520 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-nb\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.489580 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-svc\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.489653 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-swift-storage-0\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 
12:09:15.489724 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-config-data\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.489795 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-combined-ca-bundle\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.490848 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/724a46c8-1096-4753-ba3e-e4128189c8ae-logs\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.491800 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-config\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.492185 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-svc\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.492410 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-swift-storage-0\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.492790 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-sb\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.497283 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-nb\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.504460 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-scripts\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.505043 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-config-data\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.505491 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-combined-ca-bundle\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.532055 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s57cp\" (UniqueName: \"kubernetes.io/projected/724a46c8-1096-4753-ba3e-e4128189c8ae-kube-api-access-s57cp\") pod \"placement-db-sync-85q57\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") " pod="openstack/placement-db-sync-85q57"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.535419 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jccbg\" (UniqueName: \"kubernetes.io/projected/48ca11e7-47b1-4362-aab7-6afd4617a783-kube-api-access-jccbg\") pod \"dnsmasq-dns-7fc6d4ffc7-mthsh\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") " pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.705461 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-85q57"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.776194 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.857237 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-wm86m"]
Jan 04 12:09:15 crc kubenswrapper[5003]: I0104 12:09:15.885823 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-sz57m"]
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.075090 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-2zm85"]
Jan 04 12:09:16 crc kubenswrapper[5003]: W0104 12:09:16.101199 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb47fe1da_877a_4d5e_a21b_c7955bd00b30.slice/crio-782d0879abd3c41319ef1fbae968d0335eec61a1791349e968383a980dff9f56 WatchSource:0}: Error finding container 782d0879abd3c41319ef1fbae968d0335eec61a1791349e968383a980dff9f56: Status 404 returned error can't find the container with id 782d0879abd3c41319ef1fbae968d0335eec61a1791349e968383a980dff9f56
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.289151 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:09:16 crc kubenswrapper[5003]: W0104 12:09:16.338267 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod821beb7d_9467_4756_8f10_c178e1bcc89e.slice/crio-9e41c9d57b1d33a51fd91ddc8de771b6022a673f1246740aaf79ba205ddeaa7c WatchSource:0}: Error finding container 9e41c9d57b1d33a51fd91ddc8de771b6022a673f1246740aaf79ba205ddeaa7c: Status 404 returned error can't find the container with id 9e41c9d57b1d33a51fd91ddc8de771b6022a673f1246740aaf79ba205ddeaa7c
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.491207 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-qx4js"]
Jan 04 12:09:16 crc kubenswrapper[5003]: W0104 12:09:16.495117 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod724a46c8_1096_4753_ba3e_e4128189c8ae.slice/crio-77509cfdd5ff709178c08d751d94dfa194154845d41b6bef621c18db884e87e2 WatchSource:0}: Error finding container 77509cfdd5ff709178c08d751d94dfa194154845d41b6bef621c18db884e87e2: Status 404 returned error can't find the container with id 77509cfdd5ff709178c08d751d94dfa194154845d41b6bef621c18db884e87e2
Jan 04 12:09:16 crc kubenswrapper[5003]: W0104 12:09:16.495683 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2b0b250_33ee_45aa_baa8_1540d9f39b03.slice/crio-5ee917fddda1a94c7bd5209bb637ecb517e5f26bb8e5dc4d6a4f868837ab9f3d WatchSource:0}: Error finding container 5ee917fddda1a94c7bd5209bb637ecb517e5f26bb8e5dc4d6a4f868837ab9f3d: Status 404 returned error can't find the container with id 5ee917fddda1a94c7bd5209bb637ecb517e5f26bb8e5dc4d6a4f868837ab9f3d
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.501519 5003 generic.go:334] "Generic (PLEG): container finished" podID="ed3610d7-f747-4c99-b42c-35f1b05e17b4" containerID="48a22b64f58300674759c80467b6a65991a5fa70c243df9b5dc8efa6cf25cedf" exitCode=0
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.502044 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-767d96458c-sz57m" event={"ID":"ed3610d7-f747-4c99-b42c-35f1b05e17b4","Type":"ContainerDied","Data":"48a22b64f58300674759c80467b6a65991a5fa70c243df9b5dc8efa6cf25cedf"}
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.502112 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-767d96458c-sz57m" event={"ID":"ed3610d7-f747-4c99-b42c-35f1b05e17b4","Type":"ContainerStarted","Data":"eaf32f425b298c6650efcc26933beba134da35c3d8e8cdec9b5abd71145f3bcd"}
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.504671 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-85q57"]
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.519360 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2zm85" event={"ID":"b47fe1da-877a-4d5e-a21b-c7955bd00b30","Type":"ContainerStarted","Data":"782d0879abd3c41319ef1fbae968d0335eec61a1791349e968383a980dff9f56"}
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.519840 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4qtmm"]
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.545271 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821beb7d-9467-4756-8f10-c178e1bcc89e","Type":"ContainerStarted","Data":"9e41c9d57b1d33a51fd91ddc8de771b6022a673f1246740aaf79ba205ddeaa7c"}
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.551705 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wm86m" event={"ID":"eee6a60e-d425-4f40-9895-62497236658f","Type":"ContainerStarted","Data":"7693732c8315b49159779b4dc18c5fe05484d703617b9730d466a74d1b0cfa18"}
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.551897 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wm86m" event={"ID":"eee6a60e-d425-4f40-9895-62497236658f","Type":"ContainerStarted","Data":"729f929acdc21071ff4afb0873053cf9748425d7c3a2a48e23d2a76a2c265420"}
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.588785 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-wm86m" podStartSLOduration=2.588757257 podStartE2EDuration="2.588757257s" podCreationTimestamp="2026-01-04 12:09:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:16.580493181 +0000 UTC m=+1272.053523052" watchObservedRunningTime="2026-01-04 12:09:16.588757257 +0000 UTC m=+1272.061787098"
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.649522 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"]
Jan 04 12:09:16 crc kubenswrapper[5003]: W0104 12:09:16.655327 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48ca11e7_47b1_4362_aab7_6afd4617a783.slice/crio-e293b336334d9c9521376b52679d4a319e965f1083ac39e595f220a6a81b8077 WatchSource:0}: Error finding container e293b336334d9c9521376b52679d4a319e965f1083ac39e595f220a6a81b8077: Status 404 returned error can't find the container with id e293b336334d9c9521376b52679d4a319e965f1083ac39e595f220a6a81b8077
Jan 04 12:09:16 crc kubenswrapper[5003]: I0104 12:09:16.934252 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-sz57m"
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.020127 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrq2s\" (UniqueName: \"kubernetes.io/projected/ed3610d7-f747-4c99-b42c-35f1b05e17b4-kube-api-access-lrq2s\") pod \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") "
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.020254 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-sb\") pod \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") "
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.020410 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-svc\") pod \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") "
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.020452 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-nb\") pod \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") "
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.020574 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-swift-storage-0\") pod \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") "
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.020617 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-config\") pod \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\" (UID: \"ed3610d7-f747-4c99-b42c-35f1b05e17b4\") "
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.037267 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed3610d7-f747-4c99-b42c-35f1b05e17b4-kube-api-access-lrq2s" (OuterVolumeSpecName: "kube-api-access-lrq2s") pod "ed3610d7-f747-4c99-b42c-35f1b05e17b4" (UID: "ed3610d7-f747-4c99-b42c-35f1b05e17b4"). InnerVolumeSpecName "kube-api-access-lrq2s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.070728 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ed3610d7-f747-4c99-b42c-35f1b05e17b4" (UID: "ed3610d7-f747-4c99-b42c-35f1b05e17b4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.070940 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-config" (OuterVolumeSpecName: "config") pod "ed3610d7-f747-4c99-b42c-35f1b05e17b4" (UID: "ed3610d7-f747-4c99-b42c-35f1b05e17b4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.071003 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ed3610d7-f747-4c99-b42c-35f1b05e17b4" (UID: "ed3610d7-f747-4c99-b42c-35f1b05e17b4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.075404 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ed3610d7-f747-4c99-b42c-35f1b05e17b4" (UID: "ed3610d7-f747-4c99-b42c-35f1b05e17b4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.103832 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ed3610d7-f747-4c99-b42c-35f1b05e17b4" (UID: "ed3610d7-f747-4c99-b42c-35f1b05e17b4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.122439 5003 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.122471 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-config\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.122483 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrq2s\" (UniqueName: \"kubernetes.io/projected/ed3610d7-f747-4c99-b42c-35f1b05e17b4-kube-api-access-lrq2s\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.122494 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.122506 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.122515 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ed3610d7-f747-4c99-b42c-35f1b05e17b4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.579397 5003 generic.go:334] "Generic (PLEG): container finished" podID="48ca11e7-47b1-4362-aab7-6afd4617a783" containerID="8558f57fae281d29a260431d8c45a1c224e2447ca0adf1b78d568b246aff7ae0" exitCode=0
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.580744 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" event={"ID":"48ca11e7-47b1-4362-aab7-6afd4617a783","Type":"ContainerDied","Data":"8558f57fae281d29a260431d8c45a1c224e2447ca0adf1b78d568b246aff7ae0"}
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.580774 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" event={"ID":"48ca11e7-47b1-4362-aab7-6afd4617a783","Type":"ContainerStarted","Data":"e293b336334d9c9521376b52679d4a319e965f1083ac39e595f220a6a81b8077"}
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.584289 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-qx4js" event={"ID":"c2b0b250-33ee-45aa-baa8-1540d9f39b03","Type":"ContainerStarted","Data":"5ee917fddda1a94c7bd5209bb637ecb517e5f26bb8e5dc4d6a4f868837ab9f3d"}
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.595183 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4qtmm" event={"ID":"a6fea540-25d6-49c0-86f9-9476e70ceb93","Type":"ContainerStarted","Data":"d88452cdae7133f339534ef3179cba669a43103313b0c64bdcd2e27bb4949757"}
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.595236 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4qtmm" event={"ID":"a6fea540-25d6-49c0-86f9-9476e70ceb93","Type":"ContainerStarted","Data":"7c74dda6f691164be8b6126ad57d28d3cfcd8c71a260e7ba7128f9d866c81178"}
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.616703 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-767d96458c-sz57m" event={"ID":"ed3610d7-f747-4c99-b42c-35f1b05e17b4","Type":"ContainerDied","Data":"eaf32f425b298c6650efcc26933beba134da35c3d8e8cdec9b5abd71145f3bcd"}
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.616768 5003 scope.go:117] "RemoveContainer" containerID="48a22b64f58300674759c80467b6a65991a5fa70c243df9b5dc8efa6cf25cedf"
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.616937 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-767d96458c-sz57m"
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.624475 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-85q57" event={"ID":"724a46c8-1096-4753-ba3e-e4128189c8ae","Type":"ContainerStarted","Data":"77509cfdd5ff709178c08d751d94dfa194154845d41b6bef621c18db884e87e2"}
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.665648 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.667056 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-4qtmm" podStartSLOduration=3.667031549 podStartE2EDuration="3.667031549s" podCreationTimestamp="2026-01-04 12:09:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:17.630260808 +0000 UTC m=+1273.103290649" watchObservedRunningTime="2026-01-04 12:09:17.667031549 +0000 UTC m=+1273.140061390"
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.970927 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-sz57m"]
Jan 04 12:09:17 crc kubenswrapper[5003]: I0104 12:09:17.976741 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-767d96458c-sz57m"]
Jan 04 12:09:18 crc kubenswrapper[5003]: I0104 12:09:18.675782 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" event={"ID":"48ca11e7-47b1-4362-aab7-6afd4617a783","Type":"ContainerStarted","Data":"b79a4330d066b26a3e4085250e920174e017ade9cc789618d6fc81183c09a8ab"}
Jan 04 12:09:18 crc kubenswrapper[5003]: I0104 12:09:18.676080 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"
Jan 04 12:09:18 crc kubenswrapper[5003]: I0104 12:09:18.707386 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" podStartSLOduration=3.707365968 podStartE2EDuration="3.707365968s" podCreationTimestamp="2026-01-04 12:09:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:18.703155748 +0000 UTC m=+1274.176185609" watchObservedRunningTime="2026-01-04 12:09:18.707365968 +0000 UTC m=+1274.180395809"
Jan 04 12:09:18 crc kubenswrapper[5003]: I0104 12:09:18.819915 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed3610d7-f747-4c99-b42c-35f1b05e17b4" path="/var/lib/kubelet/pods/ed3610d7-f747-4c99-b42c-35f1b05e17b4/volumes"
Jan 04 12:09:20 crc kubenswrapper[5003]: I0104 12:09:20.709252 5003 generic.go:334] "Generic (PLEG): container finished" podID="eee6a60e-d425-4f40-9895-62497236658f" containerID="7693732c8315b49159779b4dc18c5fe05484d703617b9730d466a74d1b0cfa18" exitCode=0
Jan 04 12:09:20 crc kubenswrapper[5003]: I0104 12:09:20.709325 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wm86m" event={"ID":"eee6a60e-d425-4f40-9895-62497236658f","Type":"ContainerDied","Data":"7693732c8315b49159779b4dc18c5fe05484d703617b9730d466a74d1b0cfa18"}
Jan 04 12:09:22 crc kubenswrapper[5003]: I0104 12:09:22.742176 5003 generic.go:334] "Generic (PLEG): container finished" podID="95845d2e-e8cf-4d56-ad63-260115e0efc8" containerID="41ebcad121f9a4824ae310763b6db5ab85e6345eca024178476fc82f67538ebe" exitCode=0
Jan 04 12:09:22 crc kubenswrapper[5003]: I0104 12:09:22.742280 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jvhdz" event={"ID":"95845d2e-e8cf-4d56-ad63-260115e0efc8","Type":"ContainerDied","Data":"41ebcad121f9a4824ae310763b6db5ab85e6345eca024178476fc82f67538ebe"}
Jan 04 12:09:24 crc kubenswrapper[5003]: E0104 12:09:24.056545 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc76d3acd_e992_42b8_8dcc_0f5f9ddbd02a.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d2524c_4772_4eb6_b108_0513fce70ad8.slice/crio-1e67620f4c61ca7aa65a88f0a18d490a091db6475ec2f8c6bb6c265ee4ccf7ef\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d2524c_4772_4eb6_b108_0513fce70ad8.slice\": RecentStats: unable to find data in memory cache]"
Jan 04 12:09:25 crc kubenswrapper[5003]: I0104 12:09:25.778407 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"
Jan 04 12:09:25 crc kubenswrapper[5003]: I0104 12:09:25.858358 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-8hs8q"]
Jan 04 12:09:25 crc kubenswrapper[5003]: I0104 12:09:25.858686 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" podUID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" containerName="dnsmasq-dns" containerID="cri-o://26c615be30307321a60553ff868cf3f2110b0cf2b3568f29830c1b3b02f8ddc0" gracePeriod=10
Jan 04 12:09:26 crc kubenswrapper[5003]: I0104 12:09:26.785832 5003 generic.go:334] "Generic (PLEG): container finished" podID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" containerID="26c615be30307321a60553ff868cf3f2110b0cf2b3568f29830c1b3b02f8ddc0" exitCode=0
Jan 04 12:09:26 crc kubenswrapper[5003]: I0104 12:09:26.785926 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" event={"ID":"f88e16a0-fb70-4ea8-89c1-0a3397e246e0","Type":"ContainerDied","Data":"26c615be30307321a60553ff868cf3f2110b0cf2b3568f29830c1b3b02f8ddc0"}
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.755764 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jvhdz"
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.761270 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-wm86m"
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.813628 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-wm86m"
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.823910 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-wm86m" event={"ID":"eee6a60e-d425-4f40-9895-62497236658f","Type":"ContainerDied","Data":"729f929acdc21071ff4afb0873053cf9748425d7c3a2a48e23d2a76a2c265420"}
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.823949 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="729f929acdc21071ff4afb0873053cf9748425d7c3a2a48e23d2a76a2c265420"
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.824491 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jvhdz" event={"ID":"95845d2e-e8cf-4d56-ad63-260115e0efc8","Type":"ContainerDied","Data":"88e5e1fc04c450fd28bd8e291178470ed9e9e4acff0a29a2cc369251a9fc687e"}
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.824512 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88e5e1fc04c450fd28bd8e291178470ed9e9e4acff0a29a2cc369251a9fc687e"
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.824553 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jvhdz"
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.848691 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-config-data\") pod \"eee6a60e-d425-4f40-9895-62497236658f\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") "
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.848743 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdgc9\" (UniqueName: \"kubernetes.io/projected/95845d2e-e8cf-4d56-ad63-260115e0efc8-kube-api-access-zdgc9\") pod \"95845d2e-e8cf-4d56-ad63-260115e0efc8\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") "
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.848778 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-combined-ca-bundle\") pod \"95845d2e-e8cf-4d56-ad63-260115e0efc8\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") "
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.848799 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-db-sync-config-data\") pod \"95845d2e-e8cf-4d56-ad63-260115e0efc8\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") "
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.848824 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-fernet-keys\") pod \"eee6a60e-d425-4f40-9895-62497236658f\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") "
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.848862 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-config-data\") pod \"95845d2e-e8cf-4d56-ad63-260115e0efc8\" (UID: \"95845d2e-e8cf-4d56-ad63-260115e0efc8\") "
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.848881 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-scripts\") pod \"eee6a60e-d425-4f40-9895-62497236658f\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") "
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.848902 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mn2dx\" (UniqueName: \"kubernetes.io/projected/eee6a60e-d425-4f40-9895-62497236658f-kube-api-access-mn2dx\") pod \"eee6a60e-d425-4f40-9895-62497236658f\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") "
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.848945 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-combined-ca-bundle\") pod \"eee6a60e-d425-4f40-9895-62497236658f\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") "
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.848979 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-credential-keys\") pod \"eee6a60e-d425-4f40-9895-62497236658f\" (UID: \"eee6a60e-d425-4f40-9895-62497236658f\") "
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.856566 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "eee6a60e-d425-4f40-9895-62497236658f" (UID: "eee6a60e-d425-4f40-9895-62497236658f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.858102 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "95845d2e-e8cf-4d56-ad63-260115e0efc8" (UID: "95845d2e-e8cf-4d56-ad63-260115e0efc8"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.858998 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-scripts" (OuterVolumeSpecName: "scripts") pod "eee6a60e-d425-4f40-9895-62497236658f" (UID: "eee6a60e-d425-4f40-9895-62497236658f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.860450 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "eee6a60e-d425-4f40-9895-62497236658f" (UID: "eee6a60e-d425-4f40-9895-62497236658f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.860917 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eee6a60e-d425-4f40-9895-62497236658f-kube-api-access-mn2dx" (OuterVolumeSpecName: "kube-api-access-mn2dx") pod "eee6a60e-d425-4f40-9895-62497236658f" (UID: "eee6a60e-d425-4f40-9895-62497236658f"). InnerVolumeSpecName "kube-api-access-mn2dx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.873725 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95845d2e-e8cf-4d56-ad63-260115e0efc8-kube-api-access-zdgc9" (OuterVolumeSpecName: "kube-api-access-zdgc9") pod "95845d2e-e8cf-4d56-ad63-260115e0efc8" (UID: "95845d2e-e8cf-4d56-ad63-260115e0efc8"). InnerVolumeSpecName "kube-api-access-zdgc9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.879704 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-config-data" (OuterVolumeSpecName: "config-data") pod "eee6a60e-d425-4f40-9895-62497236658f" (UID: "eee6a60e-d425-4f40-9895-62497236658f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.884694 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eee6a60e-d425-4f40-9895-62497236658f" (UID: "eee6a60e-d425-4f40-9895-62497236658f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.884915 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" podUID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.138:5353: connect: connection refused"
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.897293 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "95845d2e-e8cf-4d56-ad63-260115e0efc8" (UID: "95845d2e-e8cf-4d56-ad63-260115e0efc8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.912994 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-config-data" (OuterVolumeSpecName: "config-data") pod "95845d2e-e8cf-4d56-ad63-260115e0efc8" (UID: "95845d2e-e8cf-4d56-ad63-260115e0efc8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.951814 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.951879 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.951895 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mn2dx\" (UniqueName: \"kubernetes.io/projected/eee6a60e-d425-4f40-9895-62497236658f-kube-api-access-mn2dx\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.951932 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.951945 5003 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-credential-keys\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.951957 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.951973 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdgc9\" (UniqueName: \"kubernetes.io/projected/95845d2e-e8cf-4d56-ad63-260115e0efc8-kube-api-access-zdgc9\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.952005 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.952034 5003 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/95845d2e-e8cf-4d56-ad63-260115e0efc8-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:28 crc kubenswrapper[5003]: I0104 12:09:28.952046 5003 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eee6a60e-d425-4f40-9895-62497236658f-fernet-keys\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:29 crc kubenswrapper[5003]: I0104 12:09:29.941702 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-wm86m"]
Jan 04 12:09:29 crc kubenswrapper[5003]: I0104 12:09:29.949858 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-wm86m"]
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.055844 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-frvgd"]
Jan 04 12:09:30 crc kubenswrapper[5003]: E0104 12:09:30.056461 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eee6a60e-d425-4f40-9895-62497236658f" containerName="keystone-bootstrap"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.056489 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="eee6a60e-d425-4f40-9895-62497236658f" containerName="keystone-bootstrap"
Jan 04 12:09:30 crc kubenswrapper[5003]: E0104 12:09:30.056510 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95845d2e-e8cf-4d56-ad63-260115e0efc8" containerName="glance-db-sync"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.056518 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="95845d2e-e8cf-4d56-ad63-260115e0efc8" containerName="glance-db-sync"
Jan 04 12:09:30 crc kubenswrapper[5003]: E0104 12:09:30.056548 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed3610d7-f747-4c99-b42c-35f1b05e17b4" containerName="init"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.056555 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed3610d7-f747-4c99-b42c-35f1b05e17b4" containerName="init"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.056812 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="95845d2e-e8cf-4d56-ad63-260115e0efc8" containerName="glance-db-sync"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.056835 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="eee6a60e-d425-4f40-9895-62497236658f" containerName="keystone-bootstrap"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.056861 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed3610d7-f747-4c99-b42c-35f1b05e17b4" containerName="init"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.057912 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.062094 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-nsd8s"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.062535 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.065453 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-frvgd"]
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.065973 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.066107 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.066404 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.077505 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-scripts\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.077564 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-combined-ca-bundle\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.077630 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm6mh\" (UniqueName: \"kubernetes.io/projected/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-kube-api-access-zm6mh\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.077729 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-fernet-keys\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.077760 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-config-data\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.077800 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-credential-keys\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.179462 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-fernet-keys\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.179864 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-config-data\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.179885 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-credential-keys\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.179948 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-scripts\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.179969 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-combined-ca-bundle\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.180052 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm6mh\" (UniqueName: \"kubernetes.io/projected/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-kube-api-access-zm6mh\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.187409 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-scripts\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.187544 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-config-data\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.188566 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-credential-keys\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.192862 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-fernet-keys\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.205663 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-combined-ca-bundle\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.232672 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm6mh\" (UniqueName: \"kubernetes.io/projected/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-kube-api-access-zm6mh\") pod \"keystone-bootstrap-frvgd\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") " pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.390464 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.467624 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-vgfn5"]
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.469905 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.486711 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-vgfn5"]
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.493286 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.493355 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-config\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.493390 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.493418 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.493451 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-svc\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.493509 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dchz\" (UniqueName: \"kubernetes.io/projected/39ba0aea-ad4b-4466-a337-58c4916cc266-kube-api-access-6dchz\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.594653 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-svc\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.594737 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dchz\" (UniqueName: \"kubernetes.io/projected/39ba0aea-ad4b-4466-a337-58c4916cc266-kube-api-access-6dchz\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.594763 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.594801 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-config\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.594830 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.594853 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.596400 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-config\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.597334 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-svc\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.598291 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-sb\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.598426 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-swift-storage-0\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.598770 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-nb\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.632623 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dchz\" (UniqueName: \"kubernetes.io/projected/39ba0aea-ad4b-4466-a337-58c4916cc266-kube-api-access-6dchz\") pod \"dnsmasq-dns-6f6f8cb849-vgfn5\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.848823 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eee6a60e-d425-4f40-9895-62497236658f" path="/var/lib/kubelet/pods/eee6a60e-d425-4f40-9895-62497236658f/volumes"
Jan 04 12:09:30 crc kubenswrapper[5003]: I0104 12:09:30.867053 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.258818 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.260493 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.262915 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.264043 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-xn574"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.264205 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.289155 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.419231 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.419292 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-logs\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.419325 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.419351 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhrc6\" (UniqueName: \"kubernetes.io/projected/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-kube-api-access-zhrc6\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.419373 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.419494 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-config-data\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.419695 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-scripts\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.521642 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-scripts\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.521735 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.521771 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.521786 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhrc6\" (UniqueName: \"kubernetes.io/projected/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-kube-api-access-zhrc6\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.521803 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-logs\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.521817 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.521865 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-config-data\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.522647 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-logs\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.522711 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.522783 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.528262 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.530131 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-scripts\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.532559 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-config-data\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.541157 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhrc6\" (UniqueName: \"kubernetes.io/projected/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-kube-api-access-zhrc6\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.550530 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.581038 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.806911 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.808531 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.814492 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.817231 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.928328 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.928439 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.928498 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lq948\" (UniqueName: \"kubernetes.io/projected/9372c572-7b81-4ee3-b766-20ca2345997e-kube-api-access-lq948\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.928523 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-logs\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.928571 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.928596 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:31 crc kubenswrapper[5003]: I0104 12:09:31.928615 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") "
pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.031141 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.031219 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.031244 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.031296 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.031374 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.031438 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lq948\" (UniqueName: \"kubernetes.io/projected/9372c572-7b81-4ee3-b766-20ca2345997e-kube-api-access-lq948\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.031469 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-logs\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.032459 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-logs\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.032887 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.033046 
5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.037432 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.048379 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.065940 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.066178 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lq948\" (UniqueName: \"kubernetes.io/projected/9372c572-7b81-4ee3-b766-20ca2345997e-kube-api-access-lq948\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.084451 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:32 crc kubenswrapper[5003]: I0104 12:09:32.129286 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:09:33 crc kubenswrapper[5003]: I0104 12:09:33.187422 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:09:33 crc kubenswrapper[5003]: I0104 12:09:33.314792 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:09:33 crc kubenswrapper[5003]: I0104 12:09:33.884066 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" podUID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.138:5353: connect: connection refused" Jan 04 12:09:34 crc kubenswrapper[5003]: E0104 12:09:34.312907 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d2524c_4772_4eb6_b108_0513fce70ad8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc76d3acd_e992_42b8_8dcc_0f5f9ddbd02a.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d2524c_4772_4eb6_b108_0513fce70ad8.slice/crio-1e67620f4c61ca7aa65a88f0a18d490a091db6475ec2f8c6bb6c265ee4ccf7ef\": RecentStats: unable to find data in memory cache]" Jan 04 12:09:39 crc kubenswrapper[5003]: E0104 12:09:39.145510 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16" Jan 04 12:09:39 crc kubenswrapper[5003]: E0104 12:09:39.146383 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ghd9k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-qx4js_openstack(c2b0b250-33ee-45aa-baa8-1540d9f39b03): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:09:39 crc kubenswrapper[5003]: E0104 12:09:39.147640 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-qx4js" podUID="c2b0b250-33ee-45aa-baa8-1540d9f39b03" Jan 04 12:09:39 crc kubenswrapper[5003]: E0104 12:09:39.954871 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:fe32d3ea620f0c7ecfdde9bbf28417fde03bc18c6f60b1408fa8da24d8188f16\\\"\"" pod="openstack/barbican-db-sync-qx4js" podUID="c2b0b250-33ee-45aa-baa8-1540d9f39b03" Jan 04 12:09:40 crc kubenswrapper[5003]: E0104 12:09:40.414707 5003 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49" Jan 04 12:09:40 crc kubenswrapper[5003]: E0104 12:09:40.415118 5003 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-trhwh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-2zm85_openstack(b47fe1da-877a-4d5e-a21b-c7955bd00b30): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 04 12:09:40 crc kubenswrapper[5003]: E0104 12:09:40.416308 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-2zm85" podUID="b47fe1da-877a-4d5e-a21b-c7955bd00b30" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.676862 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.811555 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-config\") pod \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.811649 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xld9x\" (UniqueName: \"kubernetes.io/projected/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-kube-api-access-xld9x\") pod \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.811715 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-svc\") pod \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.811826 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-swift-storage-0\") pod \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.811942 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-nb\") pod \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.811997 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-sb\") pod \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\" (UID: \"f88e16a0-fb70-4ea8-89c1-0a3397e246e0\") " Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.821463 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-kube-api-access-xld9x" (OuterVolumeSpecName: "kube-api-access-xld9x") pod "f88e16a0-fb70-4ea8-89c1-0a3397e246e0" (UID: "f88e16a0-fb70-4ea8-89c1-0a3397e246e0"). InnerVolumeSpecName "kube-api-access-xld9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.906817 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f88e16a0-fb70-4ea8-89c1-0a3397e246e0" (UID: "f88e16a0-fb70-4ea8-89c1-0a3397e246e0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.911708 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-config" (OuterVolumeSpecName: "config") pod "f88e16a0-fb70-4ea8-89c1-0a3397e246e0" (UID: "f88e16a0-fb70-4ea8-89c1-0a3397e246e0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.914320 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.914343 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.914354 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xld9x\" (UniqueName: \"kubernetes.io/projected/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-kube-api-access-xld9x\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.918569 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f88e16a0-fb70-4ea8-89c1-0a3397e246e0" (UID: "f88e16a0-fb70-4ea8-89c1-0a3397e246e0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.924455 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f88e16a0-fb70-4ea8-89c1-0a3397e246e0" (UID: "f88e16a0-fb70-4ea8-89c1-0a3397e246e0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.932703 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f88e16a0-fb70-4ea8-89c1-0a3397e246e0" (UID: "f88e16a0-fb70-4ea8-89c1-0a3397e246e0"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.952258 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-vgfn5"] Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.964811 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821beb7d-9467-4756-8f10-c178e1bcc89e","Type":"ContainerStarted","Data":"c085d569914ded5d944b3467b6ebb0f3080261379a66abf62ca6879f362fc88c"} Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.966415 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-85q57" event={"ID":"724a46c8-1096-4753-ba3e-e4128189c8ae","Type":"ContainerStarted","Data":"e1001d955f14cbfea06a5f428203b4c75630684120b8c0116be5dc27fc4cb0e6"} Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.975556 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" event={"ID":"f88e16a0-fb70-4ea8-89c1-0a3397e246e0","Type":"ContainerDied","Data":"364c23a8ae77def1e183d40fb84f2e56f6ef42933891fe3909da2289aaf5c53e"} Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.975606 5003 scope.go:117] "RemoveContainer" containerID="26c615be30307321a60553ff868cf3f2110b0cf2b3568f29830c1b3b02f8ddc0" Jan 04 12:09:40 crc kubenswrapper[5003]: I0104 12:09:40.983048 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" Jan 04 12:09:40 crc kubenswrapper[5003]: E0104 12:09:40.989228 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:b59b7445e581cc720038107e421371c86c5765b2967e77d884ef29b1d9fd0f49\\\"\"" pod="openstack/cinder-db-sync-2zm85" podUID="b47fe1da-877a-4d5e-a21b-c7955bd00b30" Jan 04 12:09:41 crc kubenswrapper[5003]: I0104 12:09:41.001593 5003 scope.go:117] "RemoveContainer" containerID="fc96ecd6b4da1b59dea246c08c2535bce123d49b5fff66d672320cccfc32ed03" Jan 04 12:09:41 crc kubenswrapper[5003]: I0104 12:09:41.003640 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-85q57" podStartSLOduration=2.132536318 podStartE2EDuration="26.003618749s" podCreationTimestamp="2026-01-04 12:09:15 +0000 UTC" firstStartedPulling="2026-01-04 12:09:16.49780704 +0000 UTC m=+1271.970836891" lastFinishedPulling="2026-01-04 12:09:40.368889481 +0000 UTC m=+1295.841919322" observedRunningTime="2026-01-04 12:09:40.985234908 +0000 UTC m=+1296.458264749" watchObservedRunningTime="2026-01-04 12:09:41.003618749 +0000 UTC m=+1296.476648590" Jan 04 12:09:41 crc kubenswrapper[5003]: I0104 12:09:41.015696 5003 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:41 crc kubenswrapper[5003]: I0104 12:09:41.015725 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:41 crc kubenswrapper[5003]: I0104 12:09:41.015737 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f88e16a0-fb70-4ea8-89c1-0a3397e246e0-dns-svc\") on node \"crc\" DevicePath \"\"" 
Jan 04 12:09:41 crc kubenswrapper[5003]: I0104 12:09:41.054643 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:09:41 crc kubenswrapper[5003]: W0104 12:09:41.058153 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad9eaaf9_42c0_4f5a_9a2f_8127198d5e69.slice/crio-9b719609d20504effe839ec56fb62ddb45298b33001e14d7fd1227364fc41866 WatchSource:0}: Error finding container 9b719609d20504effe839ec56fb62ddb45298b33001e14d7fd1227364fc41866: Status 404 returned error can't find the container with id 9b719609d20504effe839ec56fb62ddb45298b33001e14d7fd1227364fc41866 Jan 04 12:09:41 crc kubenswrapper[5003]: I0104 12:09:41.073060 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-8hs8q"] Jan 04 12:09:41 crc kubenswrapper[5003]: I0104 12:09:41.082809 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8db84466c-8hs8q"] Jan 04 12:09:41 crc kubenswrapper[5003]: I0104 12:09:41.190037 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-frvgd"] Jan 04 12:09:41 crc kubenswrapper[5003]: W0104 12:09:41.191536 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09e11bbd_46d5_4fa2_97f1_75b8cdd3b263.slice/crio-c71aa9eaf5015e29e642571c1d1de0c8df2d44413621ac12cb047c74c11e2b91 WatchSource:0}: Error finding container c71aa9eaf5015e29e642571c1d1de0c8df2d44413621ac12cb047c74c11e2b91: Status 404 returned error can't find the container with id c71aa9eaf5015e29e642571c1d1de0c8df2d44413621ac12cb047c74c11e2b91 Jan 04 12:09:42 crc kubenswrapper[5003]: I0104 12:09:42.006257 5003 generic.go:334] "Generic (PLEG): container finished" podID="39ba0aea-ad4b-4466-a337-58c4916cc266" containerID="290a10abec17be70f9a71d44563646133e37ba9bf169d27baf01abfa7096a9c4" exitCode=0 Jan 04 12:09:42 crc kubenswrapper[5003]: I0104 12:09:42.006833 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5" event={"ID":"39ba0aea-ad4b-4466-a337-58c4916cc266","Type":"ContainerDied","Data":"290a10abec17be70f9a71d44563646133e37ba9bf169d27baf01abfa7096a9c4"} Jan 04 12:09:42 crc kubenswrapper[5003]: I0104 12:09:42.006874 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5" event={"ID":"39ba0aea-ad4b-4466-a337-58c4916cc266","Type":"ContainerStarted","Data":"068ae1a4771923882266708c4aa80883845ed8bb687f1b272bc79a2c04bd7d3c"} Jan 04 12:09:42 crc kubenswrapper[5003]: I0104 12:09:42.024589 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-frvgd" event={"ID":"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263","Type":"ContainerStarted","Data":"226223174f46da10dbf31ee136700ad731638fcc1c5e819f7db0525babaa70a9"} Jan 04 12:09:42 crc kubenswrapper[5003]: I0104 12:09:42.025058 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-frvgd" event={"ID":"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263","Type":"ContainerStarted","Data":"c71aa9eaf5015e29e642571c1d1de0c8df2d44413621ac12cb047c74c11e2b91"} Jan 04 12:09:42 crc kubenswrapper[5003]: I0104 12:09:42.027429 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69","Type":"ContainerStarted","Data":"5a7d6b62d5eb5a067633640d5a8c4d9f061fee4b3fe19f5f6477ff18f649dc67"} 
Jan 04 12:09:42 crc kubenswrapper[5003]: I0104 12:09:42.027456 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69","Type":"ContainerStarted","Data":"9b719609d20504effe839ec56fb62ddb45298b33001e14d7fd1227364fc41866"} Jan 04 12:09:42 crc kubenswrapper[5003]: I0104 12:09:42.070946 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:09:42 crc kubenswrapper[5003]: I0104 12:09:42.088406 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-frvgd" podStartSLOduration=12.08838368 podStartE2EDuration="12.08838368s" podCreationTimestamp="2026-01-04 12:09:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:42.078170563 +0000 UTC m=+1297.551200404" watchObservedRunningTime="2026-01-04 12:09:42.08838368 +0000 UTC m=+1297.561413521" Jan 04 12:09:42 crc kubenswrapper[5003]: I0104 12:09:42.822893 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" path="/var/lib/kubelet/pods/f88e16a0-fb70-4ea8-89c1-0a3397e246e0/volumes" Jan 04 12:09:43 crc kubenswrapper[5003]: I0104 12:09:43.035792 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69","Type":"ContainerStarted","Data":"7b93e344ea9373fed704cb47f39b25ccc531df76df6e16371bfa29610b80bb7c"} Jan 04 12:09:43 crc kubenswrapper[5003]: I0104 12:09:43.035974 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" containerName="glance-log" containerID="cri-o://5a7d6b62d5eb5a067633640d5a8c4d9f061fee4b3fe19f5f6477ff18f649dc67" gracePeriod=30 Jan 04 12:09:43 crc kubenswrapper[5003]: I0104 12:09:43.037129 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" containerName="glance-httpd" containerID="cri-o://7b93e344ea9373fed704cb47f39b25ccc531df76df6e16371bfa29610b80bb7c" gracePeriod=30 Jan 04 12:09:43 crc kubenswrapper[5003]: I0104 12:09:43.039349 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5" event={"ID":"39ba0aea-ad4b-4466-a337-58c4916cc266","Type":"ContainerStarted","Data":"58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b"} Jan 04 12:09:43 crc kubenswrapper[5003]: I0104 12:09:43.039846 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5" Jan 04 12:09:43 crc kubenswrapper[5003]: I0104 12:09:43.041544 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9372c572-7b81-4ee3-b766-20ca2345997e","Type":"ContainerStarted","Data":"69d90b84644502974848df495427d0da191028519e4f2786deced6fbd632d86a"} Jan 04 12:09:43 crc kubenswrapper[5003]: I0104 12:09:43.045693 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821beb7d-9467-4756-8f10-c178e1bcc89e","Type":"ContainerStarted","Data":"c6b22dfb4eaf9c5e39ba2fda19f5b31d279469302c9a6ed03250f62a24bc648f"} Jan 04 12:09:43 crc kubenswrapper[5003]: I0104 12:09:43.074354 5003 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=13.074324408 podStartE2EDuration="13.074324408s" podCreationTimestamp="2026-01-04 12:09:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:43.06713265 +0000 UTC m=+1298.540162491" watchObservedRunningTime="2026-01-04 12:09:43.074324408 +0000 UTC m=+1298.547354249" Jan 04 12:09:43 crc kubenswrapper[5003]: I0104 12:09:43.109621 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5" podStartSLOduration=13.10960193 podStartE2EDuration="13.10960193s" podCreationTimestamp="2026-01-04 12:09:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:43.102492924 +0000 UTC m=+1298.575522775" watchObservedRunningTime="2026-01-04 12:09:43.10960193 +0000 UTC m=+1298.582631771" Jan 04 12:09:43 crc kubenswrapper[5003]: I0104 12:09:43.884332 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8db84466c-8hs8q" podUID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.138:5353: i/o timeout" Jan 04 12:09:44 crc kubenswrapper[5003]: I0104 12:09:44.054324 5003 generic.go:334] "Generic (PLEG): container finished" podID="ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" containerID="5a7d6b62d5eb5a067633640d5a8c4d9f061fee4b3fe19f5f6477ff18f649dc67" exitCode=143 Jan 04 12:09:44 crc kubenswrapper[5003]: I0104 12:09:44.054704 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69","Type":"ContainerDied","Data":"5a7d6b62d5eb5a067633640d5a8c4d9f061fee4b3fe19f5f6477ff18f649dc67"} Jan 04 12:09:44 crc kubenswrapper[5003]: E0104 12:09:44.537105 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d2524c_4772_4eb6_b108_0513fce70ad8.slice/crio-1e67620f4c61ca7aa65a88f0a18d490a091db6475ec2f8c6bb6c265ee4ccf7ef\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc76d3acd_e992_42b8_8dcc_0f5f9ddbd02a.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d2524c_4772_4eb6_b108_0513fce70ad8.slice\": RecentStats: unable to find data in memory cache]" Jan 04 12:09:45 crc kubenswrapper[5003]: I0104 12:09:45.062875 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9372c572-7b81-4ee3-b766-20ca2345997e","Type":"ContainerStarted","Data":"c62d8449456260fbf100deabd2ae19670c3a5bcd1ee2175b812d4316e4cf4397"} Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.078667 5003 generic.go:334] "Generic (PLEG): container finished" podID="724a46c8-1096-4753-ba3e-e4128189c8ae" containerID="e1001d955f14cbfea06a5f428203b4c75630684120b8c0116be5dc27fc4cb0e6" exitCode=0 Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.078749 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-85q57" event={"ID":"724a46c8-1096-4753-ba3e-e4128189c8ae","Type":"ContainerDied","Data":"e1001d955f14cbfea06a5f428203b4c75630684120b8c0116be5dc27fc4cb0e6"} Jan 04 12:09:46 crc 
kubenswrapper[5003]: I0104 12:09:46.082745 5003 generic.go:334] "Generic (PLEG): container finished" podID="ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" containerID="7b93e344ea9373fed704cb47f39b25ccc531df76df6e16371bfa29610b80bb7c" exitCode=0 Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.082794 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69","Type":"ContainerDied","Data":"7b93e344ea9373fed704cb47f39b25ccc531df76df6e16371bfa29610b80bb7c"} Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.475994 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.545543 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-logs\") pod \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.545615 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-httpd-run\") pod \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.545737 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-scripts\") pod \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.545762 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhrc6\" (UniqueName: \"kubernetes.io/projected/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-kube-api-access-zhrc6\") pod \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.545800 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.545885 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-config-data\") pod \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.545939 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-combined-ca-bundle\") pod \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\" (UID: \"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69\") " Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.547664 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-logs" (OuterVolumeSpecName: "logs") pod "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" (UID: "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.547927 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" (UID: "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.563924 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" (UID: "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.577372 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-scripts" (OuterVolumeSpecName: "scripts") pod "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" (UID: "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.585256 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-kube-api-access-zhrc6" (OuterVolumeSpecName: "kube-api-access-zhrc6") pod "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" (UID: "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69"). InnerVolumeSpecName "kube-api-access-zhrc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.651211 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.651240 5003 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.651251 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.651261 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhrc6\" (UniqueName: \"kubernetes.io/projected/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-kube-api-access-zhrc6\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.651282 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.660282 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" (UID: "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.682194 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-config-data" (OuterVolumeSpecName: "config-data") pod "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" (UID: "ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.690643 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.752855 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.752888 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:46 crc kubenswrapper[5003]: I0104 12:09:46.752902 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.094907 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69","Type":"ContainerDied","Data":"9b719609d20504effe839ec56fb62ddb45298b33001e14d7fd1227364fc41866"} Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.094989 5003 scope.go:117] "RemoveContainer" containerID="7b93e344ea9373fed704cb47f39b25ccc531df76df6e16371bfa29610b80bb7c" Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.094990 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.104573 5003 generic.go:334] "Generic (PLEG): container finished" podID="09e11bbd-46d5-4fa2-97f1-75b8cdd3b263" containerID="226223174f46da10dbf31ee136700ad731638fcc1c5e819f7db0525babaa70a9" exitCode=0 Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.104654 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-frvgd" event={"ID":"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263","Type":"ContainerDied","Data":"226223174f46da10dbf31ee136700ad731638fcc1c5e819f7db0525babaa70a9"} Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.112828 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9372c572-7b81-4ee3-b766-20ca2345997e" containerName="glance-log" containerID="cri-o://c62d8449456260fbf100deabd2ae19670c3a5bcd1ee2175b812d4316e4cf4397" gracePeriod=30 Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.113118 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9372c572-7b81-4ee3-b766-20ca2345997e" containerName="glance-httpd" containerID="cri-o://239d49624ddcc45308caa188e3b977b2e23593fa49dfd088c1185e8de51d8d23" gracePeriod=30 Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.113120 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9372c572-7b81-4ee3-b766-20ca2345997e","Type":"ContainerStarted","Data":"239d49624ddcc45308caa188e3b977b2e23593fa49dfd088c1185e8de51d8d23"} Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.128557 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.149832 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.162622 5003 scope.go:117] "RemoveContainer" containerID="5a7d6b62d5eb5a067633640d5a8c4d9f061fee4b3fe19f5f6477ff18f649dc67" Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.173194 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:09:47 crc kubenswrapper[5003]: E0104 12:09:47.173763 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" containerName="init" Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.173788 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" containerName="init" Jan 04 12:09:47 crc kubenswrapper[5003]: E0104 12:09:47.173808 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" containerName="glance-httpd" Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.173818 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" containerName="glance-httpd" Jan 04 12:09:47 crc kubenswrapper[5003]: E0104 12:09:47.173836 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" containerName="glance-log" Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.173845 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" containerName="glance-log" Jan 04 12:09:47 crc kubenswrapper[5003]: E0104 
12:09:47.173869 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" containerName="dnsmasq-dns"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.173877 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" containerName="dnsmasq-dns"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.174259 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" containerName="glance-httpd"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.174284 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" containerName="glance-log"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.174296 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f88e16a0-fb70-4ea8-89c1-0a3397e246e0" containerName="dnsmasq-dns"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.175530 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.180488 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.180693 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.221768 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.225354 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=17.225336446 podStartE2EDuration="17.225336446s" podCreationTimestamp="2026-01-04 12:09:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:47.168964973 +0000 UTC m=+1302.641994814" watchObservedRunningTime="2026-01-04 12:09:47.225336446 +0000 UTC m=+1302.698366277"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.269199 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-config-data\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.269242 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhbr7\" (UniqueName: \"kubernetes.io/projected/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-kube-api-access-vhbr7\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.269298 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-logs\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.269373 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.269411 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.269433 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-scripts\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.269456 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.269513 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.371670 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.371720 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.371796 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhbr7\" (UniqueName: \"kubernetes.io/projected/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-kube-api-access-vhbr7\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.371812 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-config-data\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.371858 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-logs\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.371885 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.371915 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.371934 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-scripts\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.372943 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.373320 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-logs\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.373330 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.378619 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-scripts\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.378938 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.381730 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.393350 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-config-data\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.395039 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhbr7\" (UniqueName: \"kubernetes.io/projected/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-kube-api-access-vhbr7\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.475456 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " pod="openstack/glance-default-external-api-0"
Jan 04 12:09:47 crc kubenswrapper[5003]: I0104 12:09:47.495096 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Jan 04 12:09:48 crc kubenswrapper[5003]: I0104 12:09:48.126220 5003 generic.go:334] "Generic (PLEG): container finished" podID="a6fea540-25d6-49c0-86f9-9476e70ceb93" containerID="d88452cdae7133f339534ef3179cba669a43103313b0c64bdcd2e27bb4949757" exitCode=0
Jan 04 12:09:48 crc kubenswrapper[5003]: I0104 12:09:48.126431 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4qtmm" event={"ID":"a6fea540-25d6-49c0-86f9-9476e70ceb93","Type":"ContainerDied","Data":"d88452cdae7133f339534ef3179cba669a43103313b0c64bdcd2e27bb4949757"}
Jan 04 12:09:48 crc kubenswrapper[5003]: I0104 12:09:48.128626 5003 generic.go:334] "Generic (PLEG): container finished" podID="9372c572-7b81-4ee3-b766-20ca2345997e" containerID="239d49624ddcc45308caa188e3b977b2e23593fa49dfd088c1185e8de51d8d23" exitCode=0
Jan 04 12:09:48 crc kubenswrapper[5003]: I0104 12:09:48.128644 5003 generic.go:334] "Generic (PLEG): container finished" podID="9372c572-7b81-4ee3-b766-20ca2345997e" containerID="c62d8449456260fbf100deabd2ae19670c3a5bcd1ee2175b812d4316e4cf4397" exitCode=143
Jan 04 12:09:48 crc kubenswrapper[5003]: I0104 12:09:48.128694 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9372c572-7b81-4ee3-b766-20ca2345997e","Type":"ContainerDied","Data":"239d49624ddcc45308caa188e3b977b2e23593fa49dfd088c1185e8de51d8d23"}
Jan 04 12:09:48 crc kubenswrapper[5003]: I0104 12:09:48.128731 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9372c572-7b81-4ee3-b766-20ca2345997e","Type":"ContainerDied","Data":"c62d8449456260fbf100deabd2ae19670c3a5bcd1ee2175b812d4316e4cf4397"}
Jan 04 12:09:48 crc kubenswrapper[5003]: I0104 12:09:48.816747 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69" path="/var/lib/kubelet/pods/ad9eaaf9-42c0-4f5a-9a2f-8127198d5e69/volumes"
Jan 04 12:09:50 crc kubenswrapper[5003]: I0104 12:09:50.869277 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5"
Jan 04 12:09:50 crc kubenswrapper[5003]: I0104 12:09:50.942624 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"]
Jan 04 12:09:50 crc kubenswrapper[5003]: I0104 12:09:50.943139 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" podUID="48ca11e7-47b1-4362-aab7-6afd4617a783" containerName="dnsmasq-dns" containerID="cri-o://b79a4330d066b26a3e4085250e920174e017ade9cc789618d6fc81183c09a8ab" gracePeriod=10
Jan 04 12:09:50 crc kubenswrapper[5003]: I0104 12:09:50.970601 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4qtmm"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.030308 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-85q57"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.037469 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153097 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-combined-ca-bundle\") pod \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153147 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-config-data\") pod \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153183 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s57cp\" (UniqueName: \"kubernetes.io/projected/724a46c8-1096-4753-ba3e-e4128189c8ae-kube-api-access-s57cp\") pod \"724a46c8-1096-4753-ba3e-e4128189c8ae\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153252 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm6mh\" (UniqueName: \"kubernetes.io/projected/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-kube-api-access-zm6mh\") pod \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153281 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-config\") pod \"a6fea540-25d6-49c0-86f9-9476e70ceb93\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153309 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-scripts\") pod \"724a46c8-1096-4753-ba3e-e4128189c8ae\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153374 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-credential-keys\") pod \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153397 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-combined-ca-bundle\") pod \"724a46c8-1096-4753-ba3e-e4128189c8ae\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153444 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-combined-ca-bundle\") pod \"a6fea540-25d6-49c0-86f9-9476e70ceb93\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153474 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/724a46c8-1096-4753-ba3e-e4128189c8ae-logs\") pod \"724a46c8-1096-4753-ba3e-e4128189c8ae\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153491 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-config-data\") pod \"724a46c8-1096-4753-ba3e-e4128189c8ae\" (UID: \"724a46c8-1096-4753-ba3e-e4128189c8ae\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153551 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-scripts\") pod \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153582 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-fernet-keys\") pod \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\" (UID: \"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.153620 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6twh9\" (UniqueName: \"kubernetes.io/projected/a6fea540-25d6-49c0-86f9-9476e70ceb93-kube-api-access-6twh9\") pod \"a6fea540-25d6-49c0-86f9-9476e70ceb93\" (UID: \"a6fea540-25d6-49c0-86f9-9476e70ceb93\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.154973 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/724a46c8-1096-4753-ba3e-e4128189c8ae-logs" (OuterVolumeSpecName: "logs") pod "724a46c8-1096-4753-ba3e-e4128189c8ae" (UID: "724a46c8-1096-4753-ba3e-e4128189c8ae"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.179735 5003 generic.go:334] "Generic (PLEG): container finished" podID="48ca11e7-47b1-4362-aab7-6afd4617a783" containerID="b79a4330d066b26a3e4085250e920174e017ade9cc789618d6fc81183c09a8ab" exitCode=0
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.179866 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" event={"ID":"48ca11e7-47b1-4362-aab7-6afd4617a783","Type":"ContainerDied","Data":"b79a4330d066b26a3e4085250e920174e017ade9cc789618d6fc81183c09a8ab"}
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.189538 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-frvgd" event={"ID":"09e11bbd-46d5-4fa2-97f1-75b8cdd3b263","Type":"ContainerDied","Data":"c71aa9eaf5015e29e642571c1d1de0c8df2d44413621ac12cb047c74c11e2b91"}
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.189592 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c71aa9eaf5015e29e642571c1d1de0c8df2d44413621ac12cb047c74c11e2b91"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.189672 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-frvgd"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.195781 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6fea540-25d6-49c0-86f9-9476e70ceb93-kube-api-access-6twh9" (OuterVolumeSpecName: "kube-api-access-6twh9") pod "a6fea540-25d6-49c0-86f9-9476e70ceb93" (UID: "a6fea540-25d6-49c0-86f9-9476e70ceb93"). InnerVolumeSpecName "kube-api-access-6twh9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.196184 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4qtmm" event={"ID":"a6fea540-25d6-49c0-86f9-9476e70ceb93","Type":"ContainerDied","Data":"7c74dda6f691164be8b6126ad57d28d3cfcd8c71a260e7ba7128f9d866c81178"}
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.196221 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c74dda6f691164be8b6126ad57d28d3cfcd8c71a260e7ba7128f9d866c81178"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.196293 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4qtmm"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.200930 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-85q57" event={"ID":"724a46c8-1096-4753-ba3e-e4128189c8ae","Type":"ContainerDied","Data":"77509cfdd5ff709178c08d751d94dfa194154845d41b6bef621c18db884e87e2"}
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.200973 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77509cfdd5ff709178c08d751d94dfa194154845d41b6bef621c18db884e87e2"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.201040 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-85q57"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.205508 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-scripts" (OuterVolumeSpecName: "scripts") pod "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263" (UID: "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.205857 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/724a46c8-1096-4753-ba3e-e4128189c8ae-kube-api-access-s57cp" (OuterVolumeSpecName: "kube-api-access-s57cp") pod "724a46c8-1096-4753-ba3e-e4128189c8ae" (UID: "724a46c8-1096-4753-ba3e-e4128189c8ae"). InnerVolumeSpecName "kube-api-access-s57cp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.205530 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-kube-api-access-zm6mh" (OuterVolumeSpecName: "kube-api-access-zm6mh") pod "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263" (UID: "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263"). InnerVolumeSpecName "kube-api-access-zm6mh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.206007 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-scripts" (OuterVolumeSpecName: "scripts") pod "724a46c8-1096-4753-ba3e-e4128189c8ae" (UID: "724a46c8-1096-4753-ba3e-e4128189c8ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.207606 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263" (UID: "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.207659 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263" (UID: "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.249484 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-config" (OuterVolumeSpecName: "config") pod "a6fea540-25d6-49c0-86f9-9476e70ceb93" (UID: "a6fea540-25d6-49c0-86f9-9476e70ceb93"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.255594 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.255632 5003 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-fernet-keys\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.255646 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6twh9\" (UniqueName: \"kubernetes.io/projected/a6fea540-25d6-49c0-86f9-9476e70ceb93-kube-api-access-6twh9\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.255656 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s57cp\" (UniqueName: \"kubernetes.io/projected/724a46c8-1096-4753-ba3e-e4128189c8ae-kube-api-access-s57cp\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.255666 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm6mh\" (UniqueName: \"kubernetes.io/projected/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-kube-api-access-zm6mh\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.255678 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-config\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.255687 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.255695 5003 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-credential-keys\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.255704 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/724a46c8-1096-4753-ba3e-e4128189c8ae-logs\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.261834 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263" (UID: "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.279155 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-config-data" (OuterVolumeSpecName: "config-data") pod "724a46c8-1096-4753-ba3e-e4128189c8ae" (UID: "724a46c8-1096-4753-ba3e-e4128189c8ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.282698 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.291167 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-config-data" (OuterVolumeSpecName: "config-data") pod "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263" (UID: "09e11bbd-46d5-4fa2-97f1-75b8cdd3b263"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.293399 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "724a46c8-1096-4753-ba3e-e4128189c8ae" (UID: "724a46c8-1096-4753-ba3e-e4128189c8ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.290857 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a6fea540-25d6-49c0-86f9-9476e70ceb93" (UID: "a6fea540-25d6-49c0-86f9-9476e70ceb93"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.359717 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-combined-ca-bundle\") pod \"9372c572-7b81-4ee3-b766-20ca2345997e\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.359911 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-logs\") pod \"9372c572-7b81-4ee3-b766-20ca2345997e\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.359957 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"9372c572-7b81-4ee3-b766-20ca2345997e\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.359987 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-httpd-run\") pod \"9372c572-7b81-4ee3-b766-20ca2345997e\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.360060 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-config-data\") pod \"9372c572-7b81-4ee3-b766-20ca2345997e\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.360079 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-scripts\") pod \"9372c572-7b81-4ee3-b766-20ca2345997e\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.360168 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lq948\" (UniqueName: \"kubernetes.io/projected/9372c572-7b81-4ee3-b766-20ca2345997e-kube-api-access-lq948\") pod \"9372c572-7b81-4ee3-b766-20ca2345997e\" (UID: \"9372c572-7b81-4ee3-b766-20ca2345997e\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.360574 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.360587 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.360596 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.360604 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6fea540-25d6-49c0-86f9-9476e70ceb93-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.360612 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/724a46c8-1096-4753-ba3e-e4128189c8ae-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.361162 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9372c572-7b81-4ee3-b766-20ca2345997e" (UID: "9372c572-7b81-4ee3-b766-20ca2345997e"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.361430 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-logs" (OuterVolumeSpecName: "logs") pod "9372c572-7b81-4ee3-b766-20ca2345997e" (UID: "9372c572-7b81-4ee3-b766-20ca2345997e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.368313 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-scripts" (OuterVolumeSpecName: "scripts") pod "9372c572-7b81-4ee3-b766-20ca2345997e" (UID: "9372c572-7b81-4ee3-b766-20ca2345997e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.371197 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9372c572-7b81-4ee3-b766-20ca2345997e-kube-api-access-lq948" (OuterVolumeSpecName: "kube-api-access-lq948") pod "9372c572-7b81-4ee3-b766-20ca2345997e" (UID: "9372c572-7b81-4ee3-b766-20ca2345997e"). InnerVolumeSpecName "kube-api-access-lq948". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.371274 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "9372c572-7b81-4ee3-b766-20ca2345997e" (UID: "9372c572-7b81-4ee3-b766-20ca2345997e"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.392469 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9372c572-7b81-4ee3-b766-20ca2345997e" (UID: "9372c572-7b81-4ee3-b766-20ca2345997e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.419239 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-config-data" (OuterVolumeSpecName: "config-data") pod "9372c572-7b81-4ee3-b766-20ca2345997e" (UID: "9372c572-7b81-4ee3-b766-20ca2345997e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.462382 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.462417 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.462427 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lq948\" (UniqueName: \"kubernetes.io/projected/9372c572-7b81-4ee3-b766-20ca2345997e-kube-api-access-lq948\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.462438 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9372c572-7b81-4ee3-b766-20ca2345997e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.462447 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-logs\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.462484 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.462494 5003 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9372c572-7b81-4ee3-b766-20ca2345997e-httpd-run\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.514855 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.555587 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.563734 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-svc\") pod \"48ca11e7-47b1-4362-aab7-6afd4617a783\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.563785 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-config\") pod \"48ca11e7-47b1-4362-aab7-6afd4617a783\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.563813 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-sb\") pod \"48ca11e7-47b1-4362-aab7-6afd4617a783\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.563872 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-swift-storage-0\") pod \"48ca11e7-47b1-4362-aab7-6afd4617a783\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.563934 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-nb\") pod \"48ca11e7-47b1-4362-aab7-6afd4617a783\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.563986 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jccbg\" (UniqueName: \"kubernetes.io/projected/48ca11e7-47b1-4362-aab7-6afd4617a783-kube-api-access-jccbg\") pod \"48ca11e7-47b1-4362-aab7-6afd4617a783\" (UID: \"48ca11e7-47b1-4362-aab7-6afd4617a783\") "
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.564386 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.598270 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48ca11e7-47b1-4362-aab7-6afd4617a783-kube-api-access-jccbg" (OuterVolumeSpecName: "kube-api-access-jccbg") pod "48ca11e7-47b1-4362-aab7-6afd4617a783" (UID: "48ca11e7-47b1-4362-aab7-6afd4617a783"). InnerVolumeSpecName "kube-api-access-jccbg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: W0104 12:09:51.611534 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod328505b1_7d4b_44ae_a5d1_f77f22f2e79c.slice/crio-1d8c5aad56d71c8fbbfcbdce0249adf35a6b858031352c9e96b1b3fb433bdd24 WatchSource:0}: Error finding container 1d8c5aad56d71c8fbbfcbdce0249adf35a6b858031352c9e96b1b3fb433bdd24: Status 404 returned error can't find the container with id 1d8c5aad56d71c8fbbfcbdce0249adf35a6b858031352c9e96b1b3fb433bdd24
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.625668 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "48ca11e7-47b1-4362-aab7-6afd4617a783" (UID: "48ca11e7-47b1-4362-aab7-6afd4617a783"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.627945 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.644851 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "48ca11e7-47b1-4362-aab7-6afd4617a783" (UID: "48ca11e7-47b1-4362-aab7-6afd4617a783"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.646107 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "48ca11e7-47b1-4362-aab7-6afd4617a783" (UID: "48ca11e7-47b1-4362-aab7-6afd4617a783"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.649899 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "48ca11e7-47b1-4362-aab7-6afd4617a783" (UID: "48ca11e7-47b1-4362-aab7-6afd4617a783"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.655430 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-config" (OuterVolumeSpecName: "config") pod "48ca11e7-47b1-4362-aab7-6afd4617a783" (UID: "48ca11e7-47b1-4362-aab7-6afd4617a783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.666531 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.666561 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-config\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.666575 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.666586 5003 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.666597 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48ca11e7-47b1-4362-aab7-6afd4617a783-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:51 crc kubenswrapper[5003]: I0104 12:09:51.666606 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jccbg\" (UniqueName: \"kubernetes.io/projected/48ca11e7-47b1-4362-aab7-6afd4617a783-kube-api-access-jccbg\") on node \"crc\" DevicePath \"\""
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.202843 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-85dcb76789-v5z7d"]
Jan 04 12:09:52 crc kubenswrapper[5003]: E0104 12:09:52.203645 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48ca11e7-47b1-4362-aab7-6afd4617a783" containerName="init"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203662 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="48ca11e7-47b1-4362-aab7-6afd4617a783" containerName="init"
Jan 04 12:09:52 crc kubenswrapper[5003]: E0104 12:09:52.203676 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48ca11e7-47b1-4362-aab7-6afd4617a783" containerName="dnsmasq-dns"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203682 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="48ca11e7-47b1-4362-aab7-6afd4617a783" containerName="dnsmasq-dns"
Jan 04 12:09:52 crc kubenswrapper[5003]: E0104 12:09:52.203695 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="724a46c8-1096-4753-ba3e-e4128189c8ae" containerName="placement-db-sync"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203702 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="724a46c8-1096-4753-ba3e-e4128189c8ae" containerName="placement-db-sync"
Jan 04 12:09:52 crc kubenswrapper[5003]: E0104 12:09:52.203712 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6fea540-25d6-49c0-86f9-9476e70ceb93" containerName="neutron-db-sync"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203717 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6fea540-25d6-49c0-86f9-9476e70ceb93" containerName="neutron-db-sync"
Jan 04 12:09:52 crc kubenswrapper[5003]: E0104 12:09:52.203726 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e11bbd-46d5-4fa2-97f1-75b8cdd3b263" containerName="keystone-bootstrap"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203732 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e11bbd-46d5-4fa2-97f1-75b8cdd3b263" containerName="keystone-bootstrap"
Jan 04 12:09:52 crc kubenswrapper[5003]: E0104 12:09:52.203743 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9372c572-7b81-4ee3-b766-20ca2345997e" containerName="glance-httpd"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203751 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9372c572-7b81-4ee3-b766-20ca2345997e" containerName="glance-httpd"
Jan 04 12:09:52 crc kubenswrapper[5003]: E0104 12:09:52.203766 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9372c572-7b81-4ee3-b766-20ca2345997e" containerName="glance-log"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203772 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9372c572-7b81-4ee3-b766-20ca2345997e" containerName="glance-log"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203921 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="48ca11e7-47b1-4362-aab7-6afd4617a783" containerName="dnsmasq-dns"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203934 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6fea540-25d6-49c0-86f9-9476e70ceb93" containerName="neutron-db-sync"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203940 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e11bbd-46d5-4fa2-97f1-75b8cdd3b263" containerName="keystone-bootstrap"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203952 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9372c572-7b81-4ee3-b766-20ca2345997e" containerName="glance-httpd"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203961 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9372c572-7b81-4ee3-b766-20ca2345997e" containerName="glance-log"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.203975 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="724a46c8-1096-4753-ba3e-e4128189c8ae" containerName="placement-db-sync"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.204597 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.237245 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.237544 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-nsd8s"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.237679 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.237863 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.238002 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.238150 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.238148 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh" event={"ID":"48ca11e7-47b1-4362-aab7-6afd4617a783","Type":"ContainerDied","Data":"e293b336334d9c9521376b52679d4a319e965f1083ac39e595f220a6a81b8077"}
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.238308 5003 scope.go:117] "RemoveContainer" containerID="b79a4330d066b26a3e4085250e920174e017ade9cc789618d6fc81183c09a8ab"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.238519 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.270869 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5569cb5574-jt6r6"]
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.273150 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.282538 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-internal-tls-certs\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284153 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-internal-tls-certs\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284253 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-config-data\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284353 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ebba05f-e935-404f-85c0-4bd28a6afd28-logs\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284398 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-config-data\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284421 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-credential-keys\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284469 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-combined-ca-bundle\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284499 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-combined-ca-bundle\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284547 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-scripts\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284584 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-scripts\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284681 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgqdq\" (UniqueName: \"kubernetes.io/projected/597b6841-5a72-4d8d-b2a6-dec279d628d0-kube-api-access-rgqdq\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284767 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-public-tls-certs\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284809 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zz46\" (UniqueName: \"kubernetes.io/projected/0ebba05f-e935-404f-85c0-4bd28a6afd28-kube-api-access-4zz46\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284849 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-public-tls-certs\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.284878 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-fernet-keys\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.309391 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-5w4cq"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.309616 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.309776 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.309842 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821beb7d-9467-4756-8f10-c178e1bcc89e","Type":"ContainerStarted","Data":"bf16335052ed3530c6aa6a89f24742c9555210a833e62c66be2e8c4d8be9218b"}
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.309990 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.319679 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-85dcb76789-v5z7d"]
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.320331 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.321953 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"328505b1-7d4b-44ae-a5d1-f77f22f2e79c","Type":"ContainerStarted","Data":"b03e7d885801f8cce4056f2883bd7034d6edbed6543308c3dd26b0305ab4dfa2"}
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.322076 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"328505b1-7d4b-44ae-a5d1-f77f22f2e79c","Type":"ContainerStarted","Data":"1d8c5aad56d71c8fbbfcbdce0249adf35a6b858031352c9e96b1b3fb433bdd24"}
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.324177 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9372c572-7b81-4ee3-b766-20ca2345997e","Type":"ContainerDied","Data":"69d90b84644502974848df495427d0da191028519e4f2786deced6fbd632d86a"}
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.324373 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.327486 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5569cb5574-jt6r6"]
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.383499 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-685444497c-wclfh"]
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386224 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-scripts\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386286 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-scripts\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386331 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgqdq\" (UniqueName: \"kubernetes.io/projected/597b6841-5a72-4d8d-b2a6-dec279d628d0-kube-api-access-rgqdq\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386365 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-public-tls-certs\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386388 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zz46\" (UniqueName: \"kubernetes.io/projected/0ebba05f-e935-404f-85c0-4bd28a6afd28-kube-api-access-4zz46\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386408 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-public-tls-certs\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386427 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-fernet-keys\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386457 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-internal-tls-certs\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386485 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-internal-tls-certs\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386512 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-config-data\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386540 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ebba05f-e935-404f-85c0-4bd28a6afd28-logs\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386557 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-config-data\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386572 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-credential-keys\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386594 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-combined-ca-bundle\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.386616 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-combined-ca-bundle\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.388296 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ebba05f-e935-404f-85c0-4bd28a6afd28-logs\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.393571 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-credential-keys\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.394404 5003 scope.go:117] "RemoveContainer" containerID="8558f57fae281d29a260431d8c45a1c224e2447ca0adf1b78d568b246aff7ae0"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.397760 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-public-tls-certs\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d"
Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.403589 5003 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.405679 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-combined-ca-bundle\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.407318 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-internal-tls-certs\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.418627 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-scripts\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.419732 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-config-data\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.420673 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-fernet-keys\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.421169 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-scripts\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.421465 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-public-tls-certs\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.423032 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-internal-tls-certs\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.428759 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zz46\" (UniqueName: \"kubernetes.io/projected/0ebba05f-e935-404f-85c0-4bd28a6afd28-kube-api-access-4zz46\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.436277 5003 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-combined-ca-bundle\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.447042 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-config-data\") pod \"placement-5569cb5574-jt6r6\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.450928 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgqdq\" (UniqueName: \"kubernetes.io/projected/597b6841-5a72-4d8d-b2a6-dec279d628d0-kube-api-access-rgqdq\") pod \"keystone-85dcb76789-v5z7d\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " pod="openstack/keystone-85dcb76789-v5z7d" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.457144 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-685444497c-wclfh"] Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.475429 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.493545 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-config\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.493941 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-swift-storage-0\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.494107 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhzwg\" (UniqueName: \"kubernetes.io/projected/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-kube-api-access-mhzwg\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.494304 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-sb\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.494343 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-nb\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.494380 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-svc\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.503390 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.510502 5003 scope.go:117] "RemoveContainer" containerID="239d49624ddcc45308caa188e3b977b2e23593fa49dfd088c1185e8de51d8d23" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.510636 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"] Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.521610 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.526202 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.529645 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.530753 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.562558 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fc6d4ffc7-mthsh"] Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.585341 5003 scope.go:117] "RemoveContainer" containerID="c62d8449456260fbf100deabd2ae19670c3a5bcd1ee2175b812d4316e4cf4397" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.593271 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596633 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596697 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-sb\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596726 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-nb\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596746 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-svc\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc 
kubenswrapper[5003]: I0104 12:09:52.596766 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596785 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-logs\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596811 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596829 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596847 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596874 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-config\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596896 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgtjc\" (UniqueName: \"kubernetes.io/projected/31a5af52-3095-473b-96cd-c3531d5569bb-kube-api-access-mgtjc\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596911 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596969 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-swift-storage-0\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " 
pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.596996 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhzwg\" (UniqueName: \"kubernetes.io/projected/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-kube-api-access-mhzwg\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.601918 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-nb\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.602341 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-sb\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.602638 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-svc\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.603189 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-config\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.604605 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-swift-storage-0\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.624972 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhzwg\" (UniqueName: \"kubernetes.io/projected/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-kube-api-access-mhzwg\") pod \"dnsmasq-dns-685444497c-wclfh\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.650529 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7848f7888d-tzm72"] Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.652120 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.661282 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-zx24b" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.661371 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.661482 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.661524 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.675589 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-85dcb76789-v5z7d" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.682190 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7848f7888d-tzm72"] Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.689771 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.699918 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-combined-ca-bundle\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.700066 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.700147 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.700170 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-config\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.700203 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-logs\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.700238 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " 
pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.700264 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.700293 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.700328 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-httpd-config\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.700926 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-logs\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.700993 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.702733 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.702847 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgtjc\" (UniqueName: \"kubernetes.io/projected/31a5af52-3095-473b-96cd-c3531d5569bb-kube-api-access-mgtjc\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.702872 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-ovndb-tls-certs\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.702895 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc 
kubenswrapper[5003]: I0104 12:09:52.702953 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc6f9\" (UniqueName: \"kubernetes.io/projected/c6779a32-42c6-498d-9968-c98329f48aef-kube-api-access-wc6f9\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.711701 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.712101 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.715495 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.718658 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.727281 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgtjc\" (UniqueName: \"kubernetes.io/projected/31a5af52-3095-473b-96cd-c3531d5569bb-kube-api-access-mgtjc\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.790542 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.806225 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-combined-ca-bundle\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.806319 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-config\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.806384 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-httpd-config\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.806420 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-ovndb-tls-certs\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.806450 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc6f9\" (UniqueName: \"kubernetes.io/projected/c6779a32-42c6-498d-9968-c98329f48aef-kube-api-access-wc6f9\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.839193 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-ovndb-tls-certs\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.839627 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc6f9\" (UniqueName: \"kubernetes.io/projected/c6779a32-42c6-498d-9968-c98329f48aef-kube-api-access-wc6f9\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.847848 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-config\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.854324 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-httpd-config\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.867943 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-combined-ca-bundle\") pod \"neutron-7848f7888d-tzm72\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.872051 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48ca11e7-47b1-4362-aab7-6afd4617a783" path="/var/lib/kubelet/pods/48ca11e7-47b1-4362-aab7-6afd4617a783/volumes" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.872820 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9372c572-7b81-4ee3-b766-20ca2345997e" path="/var/lib/kubelet/pods/9372c572-7b81-4ee3-b766-20ca2345997e/volumes" Jan 04 12:09:52 crc kubenswrapper[5003]: I0104 12:09:52.946378 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:09:53 crc kubenswrapper[5003]: I0104 12:09:52.993739 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:09:53 crc kubenswrapper[5003]: I0104 12:09:53.184174 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:09:53 crc kubenswrapper[5003]: I0104 12:09:53.492584 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-685444497c-wclfh"] Jan 04 12:09:53 crc kubenswrapper[5003]: I0104 12:09:53.571007 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-85dcb76789-v5z7d"] Jan 04 12:09:53 crc kubenswrapper[5003]: I0104 12:09:53.660111 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5569cb5574-jt6r6"] Jan 04 12:09:53 crc kubenswrapper[5003]: I0104 12:09:53.867419 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:09:53 crc kubenswrapper[5003]: W0104 12:09:53.911256 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31a5af52_3095_473b_96cd_c3531d5569bb.slice/crio-49453d0030b3d98de1edc94ac0b653b6a1c772ee3690bc20b808b58d02591ca9 WatchSource:0}: Error finding container 49453d0030b3d98de1edc94ac0b653b6a1c772ee3690bc20b808b58d02591ca9: Status 404 returned error can't find the container with id 49453d0030b3d98de1edc94ac0b653b6a1c772ee3690bc20b808b58d02591ca9 Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.393209 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5569cb5574-jt6r6" event={"ID":"0ebba05f-e935-404f-85c0-4bd28a6afd28","Type":"ContainerStarted","Data":"7c4f0d8d5cf985bf4872ff6425b0a428694655251809cbbd7b02ebfb5aeb3a85"} Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.393648 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5569cb5574-jt6r6" event={"ID":"0ebba05f-e935-404f-85c0-4bd28a6afd28","Type":"ContainerStarted","Data":"23e4d9f262f1f06ccd229700f7fd9eccf5dd9ad583a531de69d8baaa59558f7e"} Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.409874 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"328505b1-7d4b-44ae-a5d1-f77f22f2e79c","Type":"ContainerStarted","Data":"7ab74bdd5825ef110386c1d6b4e891b78cb32a067feea2f27d8c553d6f1324ed"} Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.412727 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"31a5af52-3095-473b-96cd-c3531d5569bb","Type":"ContainerStarted","Data":"49453d0030b3d98de1edc94ac0b653b6a1c772ee3690bc20b808b58d02591ca9"} Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.416633 5003 generic.go:334] "Generic (PLEG): container finished" podID="6b768613-0d91-4d90-a07f-7d2cdb33c3e3" containerID="f420016660caf2d2efbf8ef172ccd25f86d71e13a069ec2cfd1f3fddb218bbeb" exitCode=0 Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.416855 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-685444497c-wclfh" event={"ID":"6b768613-0d91-4d90-a07f-7d2cdb33c3e3","Type":"ContainerDied","Data":"f420016660caf2d2efbf8ef172ccd25f86d71e13a069ec2cfd1f3fddb218bbeb"} Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.416886 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-685444497c-wclfh" event={"ID":"6b768613-0d91-4d90-a07f-7d2cdb33c3e3","Type":"ContainerStarted","Data":"a4e8516a8d916db0cf7b58ee45498da76a7c5a1d0b05dc54ae295c6f12e2ed12"} Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.424424 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-85dcb76789-v5z7d" event={"ID":"597b6841-5a72-4d8d-b2a6-dec279d628d0","Type":"ContainerStarted","Data":"f6abe0f83c7bb707281a5925c06f33a9019ba1df8b444b347dc26031613596ff"} Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.424452 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-85dcb76789-v5z7d" event={"ID":"597b6841-5a72-4d8d-b2a6-dec279d628d0","Type":"ContainerStarted","Data":"a179d2863e7e105f110032d64f4967a2a2c5e068f56a480a1bdea2a9c9d5831f"} Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.424857 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-85dcb76789-v5z7d" Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.499937 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.499914359 podStartE2EDuration="7.499914359s" podCreationTimestamp="2026-01-04 12:09:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:54.449318587 +0000 UTC m=+1309.922348428" watchObservedRunningTime="2026-01-04 12:09:54.499914359 +0000 UTC m=+1309.972944210" Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.522512 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-85dcb76789-v5z7d" podStartSLOduration=2.522490349 podStartE2EDuration="2.522490349s" podCreationTimestamp="2026-01-04 12:09:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:54.509949671 +0000 UTC m=+1309.982979512" watchObservedRunningTime="2026-01-04 12:09:54.522490349 +0000 UTC m=+1309.995520190" Jan 04 12:09:54 crc kubenswrapper[5003]: I0104 12:09:54.736825 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7848f7888d-tzm72"] Jan 04 12:09:54 crc kubenswrapper[5003]: E0104 12:09:54.857298 5003 cadvisor_stats_provider.go:516] 
"Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d2524c_4772_4eb6_b108_0513fce70ad8.slice/crio-1e67620f4c61ca7aa65a88f0a18d490a091db6475ec2f8c6bb6c265ee4ccf7ef\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96d2524c_4772_4eb6_b108_0513fce70ad8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc76d3acd_e992_42b8_8dcc_0f5f9ddbd02a.slice\": RecentStats: unable to find data in memory cache]" Jan 04 12:09:55 crc kubenswrapper[5003]: I0104 12:09:55.435356 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5569cb5574-jt6r6" event={"ID":"0ebba05f-e935-404f-85c0-4bd28a6afd28","Type":"ContainerStarted","Data":"e10ea013e6db1ec608b48d073081c9a1e3b0565542f8409e8438d07885d6f975"} Jan 04 12:09:55 crc kubenswrapper[5003]: I0104 12:09:55.436043 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:09:55 crc kubenswrapper[5003]: I0104 12:09:55.437123 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7848f7888d-tzm72" event={"ID":"c6779a32-42c6-498d-9968-c98329f48aef","Type":"ContainerStarted","Data":"78f6805dca0f17b804a34365736d2026d1121c9402b49f8637a3d96f10c5abba"} Jan 04 12:09:55 crc kubenswrapper[5003]: I0104 12:09:55.439119 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"31a5af52-3095-473b-96cd-c3531d5569bb","Type":"ContainerStarted","Data":"6ab8102445d68e771f1e3110b6fed506d60e8e6a9bf0cef1b20b4a966904c008"} Jan 04 12:09:55 crc kubenswrapper[5003]: I0104 12:09:55.441232 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-685444497c-wclfh" event={"ID":"6b768613-0d91-4d90-a07f-7d2cdb33c3e3","Type":"ContainerStarted","Data":"b60119d81be9fc4f99fe8bd185446cae74b10c1f7ded466e547d20803cd8d95d"} Jan 04 12:09:55 crc kubenswrapper[5003]: I0104 12:09:55.472574 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5569cb5574-jt6r6" podStartSLOduration=3.47255317 podStartE2EDuration="3.47255317s" podCreationTimestamp="2026-01-04 12:09:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:55.463560095 +0000 UTC m=+1310.936589946" watchObservedRunningTime="2026-01-04 12:09:55.47255317 +0000 UTC m=+1310.945583021" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.450663 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.476475 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-685444497c-wclfh" podStartSLOduration=4.476457358 podStartE2EDuration="4.476457358s" podCreationTimestamp="2026-01-04 12:09:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:56.471027496 +0000 UTC m=+1311.944057337" watchObservedRunningTime="2026-01-04 12:09:56.476457358 +0000 UTC m=+1311.949487189" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.502250 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7d88cc4d8f-5mhr4"] Jan 04 12:09:56 
crc kubenswrapper[5003]: I0104 12:09:56.503795 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.505916 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.506097 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.511518 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7d88cc4d8f-5mhr4"] Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.617511 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-combined-ca-bundle\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.617556 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-internal-tls-certs\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.617646 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxcgv\" (UniqueName: \"kubernetes.io/projected/23de8292-dc91-45db-8de9-59933352e3f2-kube-api-access-qxcgv\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.617744 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-public-tls-certs\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.617779 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-config\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.617820 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-httpd-config\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.617855 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-ovndb-tls-certs\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.719303 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-config\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.719360 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-httpd-config\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.719391 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-ovndb-tls-certs\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.719425 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-combined-ca-bundle\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.719442 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-internal-tls-certs\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.719500 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxcgv\" (UniqueName: \"kubernetes.io/projected/23de8292-dc91-45db-8de9-59933352e3f2-kube-api-access-qxcgv\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.719565 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-public-tls-certs\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.726658 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-public-tls-certs\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.726758 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-ovndb-tls-certs\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.727399 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-config\") pod 
\"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.728940 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-httpd-config\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.737667 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-combined-ca-bundle\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.737948 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-internal-tls-certs\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.744844 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxcgv\" (UniqueName: \"kubernetes.io/projected/23de8292-dc91-45db-8de9-59933352e3f2-kube-api-access-qxcgv\") pod \"neutron-7d88cc4d8f-5mhr4\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:56 crc kubenswrapper[5003]: I0104 12:09:56.819620 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:09:57 crc kubenswrapper[5003]: I0104 12:09:57.435384 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7d88cc4d8f-5mhr4"] Jan 04 12:09:57 crc kubenswrapper[5003]: I0104 12:09:57.470480 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7848f7888d-tzm72" event={"ID":"c6779a32-42c6-498d-9968-c98329f48aef","Type":"ContainerStarted","Data":"9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156"} Jan 04 12:09:57 crc kubenswrapper[5003]: I0104 12:09:57.472989 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"31a5af52-3095-473b-96cd-c3531d5569bb","Type":"ContainerStarted","Data":"d716c1374868df135ac92dd8a73e7788bc27a6e38d3942e86c10ad154a2bf75e"} Jan 04 12:09:57 crc kubenswrapper[5003]: I0104 12:09:57.498487 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 04 12:09:57 crc kubenswrapper[5003]: I0104 12:09:57.498538 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 04 12:09:57 crc kubenswrapper[5003]: I0104 12:09:57.530353 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 04 12:09:57 crc kubenswrapper[5003]: I0104 12:09:57.555323 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 04 12:09:57 crc kubenswrapper[5003]: I0104 12:09:57.557066 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.55705516 
podStartE2EDuration="5.55705516s" podCreationTimestamp="2026-01-04 12:09:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:09:57.501670392 +0000 UTC m=+1312.974700313" watchObservedRunningTime="2026-01-04 12:09:57.55705516 +0000 UTC m=+1313.030085001" Jan 04 12:09:57 crc kubenswrapper[5003]: I0104 12:09:57.795090 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:09:58 crc kubenswrapper[5003]: I0104 12:09:58.481286 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 04 12:09:58 crc kubenswrapper[5003]: I0104 12:09:58.481323 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 04 12:10:00 crc kubenswrapper[5003]: I0104 12:10:00.657173 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 04 12:10:00 crc kubenswrapper[5003]: I0104 12:10:00.658048 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 12:10:00 crc kubenswrapper[5003]: I0104 12:10:00.664059 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 04 12:10:01 crc kubenswrapper[5003]: I0104 12:10:01.510232 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7848f7888d-tzm72" event={"ID":"c6779a32-42c6-498d-9968-c98329f48aef","Type":"ContainerStarted","Data":"d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d"} Jan 04 12:10:01 crc kubenswrapper[5003]: I0104 12:10:01.510678 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:10:01 crc kubenswrapper[5003]: I0104 12:10:01.511628 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d88cc4d8f-5mhr4" event={"ID":"23de8292-dc91-45db-8de9-59933352e3f2","Type":"ContainerStarted","Data":"3a5a5bf263a0abaa533ab10ae25924784d2ab8bae9187c77b0bc156a44ef4ba6"} Jan 04 12:10:01 crc kubenswrapper[5003]: I0104 12:10:01.535523 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7848f7888d-tzm72" podStartSLOduration=9.535497508 podStartE2EDuration="9.535497508s" podCreationTimestamp="2026-01-04 12:09:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:01.533328281 +0000 UTC m=+1317.006358162" watchObservedRunningTime="2026-01-04 12:10:01.535497508 +0000 UTC m=+1317.008527349" Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.524915 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2zm85" event={"ID":"b47fe1da-877a-4d5e-a21b-c7955bd00b30","Type":"ContainerStarted","Data":"3d6ec01453584d54c02c4502ffaf6568f5d140cf80440af350ba37f5fa541403"} Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.530681 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-qx4js" event={"ID":"c2b0b250-33ee-45aa-baa8-1540d9f39b03","Type":"ContainerStarted","Data":"1ba8bece994985b44dcf19ff5cc432386d9da5fb3a4f2e1cfa6a0f8ee7427c14"} Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.540276 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"821beb7d-9467-4756-8f10-c178e1bcc89e","Type":"ContainerStarted","Data":"f9a671235c548b0fdcd3bbfa106e8b817f91754c8210a710756d753524f1bfac"} Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.540601 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="ceilometer-central-agent" containerID="cri-o://c085d569914ded5d944b3467b6ebb0f3080261379a66abf62ca6879f362fc88c" gracePeriod=30 Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.540904 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.541048 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="proxy-httpd" containerID="cri-o://f9a671235c548b0fdcd3bbfa106e8b817f91754c8210a710756d753524f1bfac" gracePeriod=30 Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.541166 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="sg-core" containerID="cri-o://bf16335052ed3530c6aa6a89f24742c9555210a833e62c66be2e8c4d8be9218b" gracePeriod=30 Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.541210 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="ceilometer-notification-agent" containerID="cri-o://c6b22dfb4eaf9c5e39ba2fda19f5b31d279469302c9a6ed03250f62a24bc648f" gracePeriod=30 Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.547740 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d88cc4d8f-5mhr4" event={"ID":"23de8292-dc91-45db-8de9-59933352e3f2","Type":"ContainerStarted","Data":"312b6e09c8b9a68b781c0ec705d9f1a75feca3742a105d128ddcfa936c23e624"} Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.547778 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.547788 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d88cc4d8f-5mhr4" event={"ID":"23de8292-dc91-45db-8de9-59933352e3f2","Type":"ContainerStarted","Data":"3d08041741653f728a1dcd7a717b74f0e1152bf8a3fc8991a888bfea4968ac08"} Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.562547 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-2zm85" podStartSLOduration=3.099178206 podStartE2EDuration="48.562530499s" podCreationTimestamp="2026-01-04 12:09:14 +0000 UTC" firstStartedPulling="2026-01-04 12:09:16.10755734 +0000 UTC m=+1271.580587191" lastFinishedPulling="2026-01-04 12:10:01.570909653 +0000 UTC m=+1317.043939484" observedRunningTime="2026-01-04 12:10:02.548593505 +0000 UTC m=+1318.021623356" watchObservedRunningTime="2026-01-04 12:10:02.562530499 +0000 UTC m=+1318.035560340" Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.578061 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.307540601 podStartE2EDuration="48.578043034s" podCreationTimestamp="2026-01-04 12:09:14 +0000 UTC" firstStartedPulling="2026-01-04 12:09:16.341257048 +0000 UTC m=+1271.814286889" lastFinishedPulling="2026-01-04 12:10:01.611759491 +0000 UTC 
m=+1317.084789322" observedRunningTime="2026-01-04 12:10:02.574793219 +0000 UTC m=+1318.047823080" watchObservedRunningTime="2026-01-04 12:10:02.578043034 +0000 UTC m=+1318.051072875" Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.607349 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-qx4js" podStartSLOduration=6.994829606 podStartE2EDuration="47.60733008s" podCreationTimestamp="2026-01-04 12:09:15 +0000 UTC" firstStartedPulling="2026-01-04 12:09:16.508000426 +0000 UTC m=+1271.981030267" lastFinishedPulling="2026-01-04 12:09:57.1205009 +0000 UTC m=+1312.593530741" observedRunningTime="2026-01-04 12:10:02.597946565 +0000 UTC m=+1318.070976406" watchObservedRunningTime="2026-01-04 12:10:02.60733008 +0000 UTC m=+1318.080359921" Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.626195 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7d88cc4d8f-5mhr4" podStartSLOduration=6.626168862 podStartE2EDuration="6.626168862s" podCreationTimestamp="2026-01-04 12:09:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:02.617533806 +0000 UTC m=+1318.090563667" watchObservedRunningTime="2026-01-04 12:10:02.626168862 +0000 UTC m=+1318.099198703" Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.796431 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.872433 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-vgfn5"] Jan 04 12:10:02 crc kubenswrapper[5003]: I0104 12:10:02.872775 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5" podUID="39ba0aea-ad4b-4466-a337-58c4916cc266" containerName="dnsmasq-dns" containerID="cri-o://58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b" gracePeriod=10 Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.188315 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.188366 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.236560 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.252697 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.402175 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.548919 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-sb\") pod \"39ba0aea-ad4b-4466-a337-58c4916cc266\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.548990 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-config\") pod \"39ba0aea-ad4b-4466-a337-58c4916cc266\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.549066 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-nb\") pod \"39ba0aea-ad4b-4466-a337-58c4916cc266\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.549247 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dchz\" (UniqueName: \"kubernetes.io/projected/39ba0aea-ad4b-4466-a337-58c4916cc266-kube-api-access-6dchz\") pod \"39ba0aea-ad4b-4466-a337-58c4916cc266\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.549280 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-svc\") pod \"39ba0aea-ad4b-4466-a337-58c4916cc266\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.549359 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-swift-storage-0\") pod \"39ba0aea-ad4b-4466-a337-58c4916cc266\" (UID: \"39ba0aea-ad4b-4466-a337-58c4916cc266\") " Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.565644 5003 generic.go:334] "Generic (PLEG): container finished" podID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerID="f9a671235c548b0fdcd3bbfa106e8b817f91754c8210a710756d753524f1bfac" exitCode=0 Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.566107 5003 generic.go:334] "Generic (PLEG): container finished" podID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerID="bf16335052ed3530c6aa6a89f24742c9555210a833e62c66be2e8c4d8be9218b" exitCode=2 Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.566122 5003 generic.go:334] "Generic (PLEG): container finished" podID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerID="c085d569914ded5d944b3467b6ebb0f3080261379a66abf62ca6879f362fc88c" exitCode=0 Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.566167 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821beb7d-9467-4756-8f10-c178e1bcc89e","Type":"ContainerDied","Data":"f9a671235c548b0fdcd3bbfa106e8b817f91754c8210a710756d753524f1bfac"} Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.566196 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821beb7d-9467-4756-8f10-c178e1bcc89e","Type":"ContainerDied","Data":"bf16335052ed3530c6aa6a89f24742c9555210a833e62c66be2e8c4d8be9218b"} Jan 04 
12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.566207 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821beb7d-9467-4756-8f10-c178e1bcc89e","Type":"ContainerDied","Data":"c085d569914ded5d944b3467b6ebb0f3080261379a66abf62ca6879f362fc88c"} Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.566691 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39ba0aea-ad4b-4466-a337-58c4916cc266-kube-api-access-6dchz" (OuterVolumeSpecName: "kube-api-access-6dchz") pod "39ba0aea-ad4b-4466-a337-58c4916cc266" (UID: "39ba0aea-ad4b-4466-a337-58c4916cc266"). InnerVolumeSpecName "kube-api-access-6dchz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.568743 5003 generic.go:334] "Generic (PLEG): container finished" podID="39ba0aea-ad4b-4466-a337-58c4916cc266" containerID="58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b" exitCode=0 Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.569134 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5" event={"ID":"39ba0aea-ad4b-4466-a337-58c4916cc266","Type":"ContainerDied","Data":"58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b"} Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.569182 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5" event={"ID":"39ba0aea-ad4b-4466-a337-58c4916cc266","Type":"ContainerDied","Data":"068ae1a4771923882266708c4aa80883845ed8bb687f1b272bc79a2c04bd7d3c"} Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.569204 5003 scope.go:117] "RemoveContainer" containerID="58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.569378 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f6f8cb849-vgfn5" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.570575 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.570613 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.619586 5003 scope.go:117] "RemoveContainer" containerID="290a10abec17be70f9a71d44563646133e37ba9bf169d27baf01abfa7096a9c4" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.620612 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "39ba0aea-ad4b-4466-a337-58c4916cc266" (UID: "39ba0aea-ad4b-4466-a337-58c4916cc266"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.621389 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "39ba0aea-ad4b-4466-a337-58c4916cc266" (UID: "39ba0aea-ad4b-4466-a337-58c4916cc266"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.628900 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "39ba0aea-ad4b-4466-a337-58c4916cc266" (UID: "39ba0aea-ad4b-4466-a337-58c4916cc266"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.630457 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "39ba0aea-ad4b-4466-a337-58c4916cc266" (UID: "39ba0aea-ad4b-4466-a337-58c4916cc266"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.634680 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-config" (OuterVolumeSpecName: "config") pod "39ba0aea-ad4b-4466-a337-58c4916cc266" (UID: "39ba0aea-ad4b-4466-a337-58c4916cc266"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.642201 5003 scope.go:117] "RemoveContainer" containerID="58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b" Jan 04 12:10:03 crc kubenswrapper[5003]: E0104 12:10:03.642679 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b\": container with ID starting with 58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b not found: ID does not exist" containerID="58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.642742 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b"} err="failed to get container status \"58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b\": rpc error: code = NotFound desc = could not find container \"58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b\": container with ID starting with 58bc66315cecaf7bdc3b7f3e10a49dc3af3752df4772fc76596222d0585c9b3b not found: ID does not exist" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.642779 5003 scope.go:117] "RemoveContainer" containerID="290a10abec17be70f9a71d44563646133e37ba9bf169d27baf01abfa7096a9c4" Jan 04 12:10:03 crc kubenswrapper[5003]: E0104 12:10:03.643184 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"290a10abec17be70f9a71d44563646133e37ba9bf169d27baf01abfa7096a9c4\": container with ID starting with 290a10abec17be70f9a71d44563646133e37ba9bf169d27baf01abfa7096a9c4 not found: ID does not exist" containerID="290a10abec17be70f9a71d44563646133e37ba9bf169d27baf01abfa7096a9c4" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.643217 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"290a10abec17be70f9a71d44563646133e37ba9bf169d27baf01abfa7096a9c4"} err="failed to get container status 
\"290a10abec17be70f9a71d44563646133e37ba9bf169d27baf01abfa7096a9c4\": rpc error: code = NotFound desc = could not find container \"290a10abec17be70f9a71d44563646133e37ba9bf169d27baf01abfa7096a9c4\": container with ID starting with 290a10abec17be70f9a71d44563646133e37ba9bf169d27baf01abfa7096a9c4 not found: ID does not exist" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.651570 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dchz\" (UniqueName: \"kubernetes.io/projected/39ba0aea-ad4b-4466-a337-58c4916cc266-kube-api-access-6dchz\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.651607 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.651617 5003 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.651625 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.651633 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.651641 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/39ba0aea-ad4b-4466-a337-58c4916cc266-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.902032 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-vgfn5"] Jan 04 12:10:03 crc kubenswrapper[5003]: I0104 12:10:03.913815 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f6f8cb849-vgfn5"] Jan 04 12:10:04 crc kubenswrapper[5003]: I0104 12:10:04.826272 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39ba0aea-ad4b-4466-a337-58c4916cc266" path="/var/lib/kubelet/pods/39ba0aea-ad4b-4466-a337-58c4916cc266/volumes" Jan 04 12:10:05 crc kubenswrapper[5003]: I0104 12:10:05.596436 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:05 crc kubenswrapper[5003]: I0104 12:10:05.598777 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.231447 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.637428 5003 generic.go:334] "Generic (PLEG): container finished" podID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerID="c6b22dfb4eaf9c5e39ba2fda19f5b31d279469302c9a6ed03250f62a24bc648f" exitCode=0 Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.638024 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"821beb7d-9467-4756-8f10-c178e1bcc89e","Type":"ContainerDied","Data":"c6b22dfb4eaf9c5e39ba2fda19f5b31d279469302c9a6ed03250f62a24bc648f"} Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.753112 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.842897 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-run-httpd\") pod \"821beb7d-9467-4756-8f10-c178e1bcc89e\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.843037 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-config-data\") pod \"821beb7d-9467-4756-8f10-c178e1bcc89e\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.843057 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-combined-ca-bundle\") pod \"821beb7d-9467-4756-8f10-c178e1bcc89e\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.843180 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-sg-core-conf-yaml\") pod \"821beb7d-9467-4756-8f10-c178e1bcc89e\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.843200 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-scripts\") pod \"821beb7d-9467-4756-8f10-c178e1bcc89e\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.843282 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-log-httpd\") pod \"821beb7d-9467-4756-8f10-c178e1bcc89e\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.843305 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8mp2\" (UniqueName: \"kubernetes.io/projected/821beb7d-9467-4756-8f10-c178e1bcc89e-kube-api-access-f8mp2\") pod \"821beb7d-9467-4756-8f10-c178e1bcc89e\" (UID: \"821beb7d-9467-4756-8f10-c178e1bcc89e\") " Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.844621 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "821beb7d-9467-4756-8f10-c178e1bcc89e" (UID: "821beb7d-9467-4756-8f10-c178e1bcc89e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.846447 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "821beb7d-9467-4756-8f10-c178e1bcc89e" (UID: "821beb7d-9467-4756-8f10-c178e1bcc89e"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.865288 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/821beb7d-9467-4756-8f10-c178e1bcc89e-kube-api-access-f8mp2" (OuterVolumeSpecName: "kube-api-access-f8mp2") pod "821beb7d-9467-4756-8f10-c178e1bcc89e" (UID: "821beb7d-9467-4756-8f10-c178e1bcc89e"). InnerVolumeSpecName "kube-api-access-f8mp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.873233 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-scripts" (OuterVolumeSpecName: "scripts") pod "821beb7d-9467-4756-8f10-c178e1bcc89e" (UID: "821beb7d-9467-4756-8f10-c178e1bcc89e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.891759 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "821beb7d-9467-4756-8f10-c178e1bcc89e" (UID: "821beb7d-9467-4756-8f10-c178e1bcc89e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.946313 5003 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.946346 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.946355 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.946366 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8mp2\" (UniqueName: \"kubernetes.io/projected/821beb7d-9467-4756-8f10-c178e1bcc89e-kube-api-access-f8mp2\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.946378 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821beb7d-9467-4756-8f10-c178e1bcc89e-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.948227 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "821beb7d-9467-4756-8f10-c178e1bcc89e" (UID: "821beb7d-9467-4756-8f10-c178e1bcc89e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:06 crc kubenswrapper[5003]: I0104 12:10:06.981558 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-config-data" (OuterVolumeSpecName: "config-data") pod "821beb7d-9467-4756-8f10-c178e1bcc89e" (UID: "821beb7d-9467-4756-8f10-c178e1bcc89e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.048198 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.048228 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/821beb7d-9467-4756-8f10-c178e1bcc89e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.655911 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821beb7d-9467-4756-8f10-c178e1bcc89e","Type":"ContainerDied","Data":"9e41c9d57b1d33a51fd91ddc8de771b6022a673f1246740aaf79ba205ddeaa7c"} Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.655967 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.655974 5003 scope.go:117] "RemoveContainer" containerID="f9a671235c548b0fdcd3bbfa106e8b817f91754c8210a710756d753524f1bfac" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.665646 5003 generic.go:334] "Generic (PLEG): container finished" podID="c2b0b250-33ee-45aa-baa8-1540d9f39b03" containerID="1ba8bece994985b44dcf19ff5cc432386d9da5fb3a4f2e1cfa6a0f8ee7427c14" exitCode=0 Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.666150 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-qx4js" event={"ID":"c2b0b250-33ee-45aa-baa8-1540d9f39b03","Type":"ContainerDied","Data":"1ba8bece994985b44dcf19ff5cc432386d9da5fb3a4f2e1cfa6a0f8ee7427c14"} Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.708184 5003 scope.go:117] "RemoveContainer" containerID="bf16335052ed3530c6aa6a89f24742c9555210a833e62c66be2e8c4d8be9218b" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.712372 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.718879 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.743353 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:07 crc kubenswrapper[5003]: E0104 12:10:07.743769 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39ba0aea-ad4b-4466-a337-58c4916cc266" containerName="dnsmasq-dns" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.743786 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="39ba0aea-ad4b-4466-a337-58c4916cc266" containerName="dnsmasq-dns" Jan 04 12:10:07 crc kubenswrapper[5003]: E0104 12:10:07.743804 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39ba0aea-ad4b-4466-a337-58c4916cc266" containerName="init" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.743811 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="39ba0aea-ad4b-4466-a337-58c4916cc266" containerName="init" Jan 04 12:10:07 crc kubenswrapper[5003]: E0104 12:10:07.743827 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="proxy-httpd" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.743833 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" 
containerName="proxy-httpd" Jan 04 12:10:07 crc kubenswrapper[5003]: E0104 12:10:07.743847 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="ceilometer-notification-agent" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.743853 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="ceilometer-notification-agent" Jan 04 12:10:07 crc kubenswrapper[5003]: E0104 12:10:07.743868 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="sg-core" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.743873 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="sg-core" Jan 04 12:10:07 crc kubenswrapper[5003]: E0104 12:10:07.743884 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="ceilometer-central-agent" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.743890 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="ceilometer-central-agent" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.744070 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="sg-core" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.744106 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="ceilometer-notification-agent" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.744124 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="ceilometer-central-agent" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.744134 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="39ba0aea-ad4b-4466-a337-58c4916cc266" containerName="dnsmasq-dns" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.744171 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" containerName="proxy-httpd" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.746879 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.751339 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.751621 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.758496 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.769899 5003 scope.go:117] "RemoveContainer" containerID="c6b22dfb4eaf9c5e39ba2fda19f5b31d279469302c9a6ed03250f62a24bc648f" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.797588 5003 scope.go:117] "RemoveContainer" containerID="c085d569914ded5d944b3467b6ebb0f3080261379a66abf62ca6879f362fc88c" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.869809 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-run-httpd\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.870298 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfd57\" (UniqueName: \"kubernetes.io/projected/235041d8-b61c-4f18-b352-c07ae69e5c49-kube-api-access-rfd57\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.870774 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.870900 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-config-data\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.870926 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-scripts\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.870946 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.870972 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-log-httpd\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 
12:10:07.971907 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-log-httpd\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.971989 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-run-httpd\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.972042 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfd57\" (UniqueName: \"kubernetes.io/projected/235041d8-b61c-4f18-b352-c07ae69e5c49-kube-api-access-rfd57\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.972078 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.972445 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-config-data\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.972472 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-scripts\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.972558 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.972568 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-run-httpd\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.972615 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-log-httpd\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.977434 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.977838 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-config-data\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.986469 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.988645 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-scripts\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:07 crc kubenswrapper[5003]: I0104 12:10:07.991409 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfd57\" (UniqueName: \"kubernetes.io/projected/235041d8-b61c-4f18-b352-c07ae69e5c49-kube-api-access-rfd57\") pod \"ceilometer-0\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " pod="openstack/ceilometer-0" Jan 04 12:10:08 crc kubenswrapper[5003]: I0104 12:10:08.075668 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:08 crc kubenswrapper[5003]: I0104 12:10:08.562956 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:08 crc kubenswrapper[5003]: I0104 12:10:08.676585 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"235041d8-b61c-4f18-b352-c07ae69e5c49","Type":"ContainerStarted","Data":"00a243e4e8ed59fcbadfe7e64a3bbccfbe59c2ac2733550d612bc3f828eabc08"} Jan 04 12:10:08 crc kubenswrapper[5003]: I0104 12:10:08.826757 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="821beb7d-9467-4756-8f10-c178e1bcc89e" path="/var/lib/kubelet/pods/821beb7d-9467-4756-8f10-c178e1bcc89e/volumes" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.038792 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-qx4js" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.098453 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghd9k\" (UniqueName: \"kubernetes.io/projected/c2b0b250-33ee-45aa-baa8-1540d9f39b03-kube-api-access-ghd9k\") pod \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.098997 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-db-sync-config-data\") pod \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.099231 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-combined-ca-bundle\") pod \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\" (UID: \"c2b0b250-33ee-45aa-baa8-1540d9f39b03\") " Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.107194 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c2b0b250-33ee-45aa-baa8-1540d9f39b03" (UID: "c2b0b250-33ee-45aa-baa8-1540d9f39b03"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.109197 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2b0b250-33ee-45aa-baa8-1540d9f39b03-kube-api-access-ghd9k" (OuterVolumeSpecName: "kube-api-access-ghd9k") pod "c2b0b250-33ee-45aa-baa8-1540d9f39b03" (UID: "c2b0b250-33ee-45aa-baa8-1540d9f39b03"). InnerVolumeSpecName "kube-api-access-ghd9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.133140 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2b0b250-33ee-45aa-baa8-1540d9f39b03" (UID: "c2b0b250-33ee-45aa-baa8-1540d9f39b03"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.201219 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghd9k\" (UniqueName: \"kubernetes.io/projected/c2b0b250-33ee-45aa-baa8-1540d9f39b03-kube-api-access-ghd9k\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.201260 5003 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.201273 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2b0b250-33ee-45aa-baa8-1540d9f39b03-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.419355 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.419832 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.691281 5003 generic.go:334] "Generic (PLEG): container finished" podID="b47fe1da-877a-4d5e-a21b-c7955bd00b30" containerID="3d6ec01453584d54c02c4502ffaf6568f5d140cf80440af350ba37f5fa541403" exitCode=0 Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.693108 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2zm85" event={"ID":"b47fe1da-877a-4d5e-a21b-c7955bd00b30","Type":"ContainerDied","Data":"3d6ec01453584d54c02c4502ffaf6568f5d140cf80440af350ba37f5fa541403"} Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.695211 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-qx4js" event={"ID":"c2b0b250-33ee-45aa-baa8-1540d9f39b03","Type":"ContainerDied","Data":"5ee917fddda1a94c7bd5209bb637ecb517e5f26bb8e5dc4d6a4f868837ab9f3d"} Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.695237 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ee917fddda1a94c7bd5209bb637ecb517e5f26bb8e5dc4d6a4f868837ab9f3d" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.695379 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-qx4js" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.703299 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"235041d8-b61c-4f18-b352-c07ae69e5c49","Type":"ContainerStarted","Data":"9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85"} Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.972587 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5d455558d5-f58qc"] Jan 04 12:10:09 crc kubenswrapper[5003]: E0104 12:10:09.974625 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2b0b250-33ee-45aa-baa8-1540d9f39b03" containerName="barbican-db-sync" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.974647 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2b0b250-33ee-45aa-baa8-1540d9f39b03" containerName="barbican-db-sync" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.974883 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2b0b250-33ee-45aa-baa8-1540d9f39b03" containerName="barbican-db-sync" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.975947 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5d455558d5-f58qc" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.981889 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.982094 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-8f5bx" Jan 04 12:10:09 crc kubenswrapper[5003]: I0104 12:10:09.984527 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.020418 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrkkk\" (UniqueName: \"kubernetes.io/projected/f0dcef7e-0621-4399-b967-5d5f90dd695f-kube-api-access-jrkkk\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc" Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.020939 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc" Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.020967 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data-custom\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc" Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.020991 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0dcef7e-0621-4399-b967-5d5f90dd695f-logs\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc" Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.021043 
5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-combined-ca-bundle\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.021547 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5d455558d5-f58qc"]
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.070297 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-644b6c944d-sd84t"]
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.072248 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.080960 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.099833 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-644b6c944d-sd84t"]
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.122219 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data-custom\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.122285 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-combined-ca-bundle\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.122339 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.122381 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.122400 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data-custom\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.122421 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0dcef7e-0621-4399-b967-5d5f90dd695f-logs\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.122465 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-combined-ca-bundle\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.122493 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp28l\" (UniqueName: \"kubernetes.io/projected/a29676ba-4d56-4b2e-a92f-c83b5f25345a-kube-api-access-vp28l\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.122538 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrkkk\" (UniqueName: \"kubernetes.io/projected/f0dcef7e-0621-4399-b967-5d5f90dd695f-kube-api-access-jrkkk\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.122558 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a29676ba-4d56-4b2e-a92f-c83b5f25345a-logs\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.130525 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0dcef7e-0621-4399-b967-5d5f90dd695f-logs\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.135280 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-combined-ca-bundle\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.137551 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.164467 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data-custom\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.167150 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrkkk\" (UniqueName: \"kubernetes.io/projected/f0dcef7e-0621-4399-b967-5d5f90dd695f-kube-api-access-jrkkk\") pod \"barbican-worker-5d455558d5-f58qc\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.171406 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"]
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.173070 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.203362 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"]
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.230146 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.230203 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-sb\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.230226 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-nb\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.230268 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-swift-storage-0\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.230300 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-svc\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.230322 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp28l\" (UniqueName: \"kubernetes.io/projected/a29676ba-4d56-4b2e-a92f-c83b5f25345a-kube-api-access-vp28l\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.230358 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a29676ba-4d56-4b2e-a92f-c83b5f25345a-logs\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.230398 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data-custom\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.230424 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-combined-ca-bundle\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.230439 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-config\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.230471 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp7qv\" (UniqueName: \"kubernetes.io/projected/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-kube-api-access-sp7qv\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.231081 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a29676ba-4d56-4b2e-a92f-c83b5f25345a-logs\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.243437 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.246280 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data-custom\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.247591 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-combined-ca-bundle\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.256535 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-69bb8d8f84-sdsn4"]
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.257617 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp28l\" (UniqueName: \"kubernetes.io/projected/a29676ba-4d56-4b2e-a92f-c83b5f25345a-kube-api-access-vp28l\") pod \"barbican-keystone-listener-644b6c944d-sd84t\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.258070 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.263989 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.282747 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-69bb8d8f84-sdsn4"]
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.332093 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-svc\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.332163 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a785b98-dc58-469e-b1fd-4dba03f79d63-logs\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.332230 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-config\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.332255 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.332280 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-combined-ca-bundle\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.332304 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp7qv\" (UniqueName: \"kubernetes.io/projected/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-kube-api-access-sp7qv\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.332398 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-sb\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.332439 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-nb\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.332491 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data-custom\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.332574 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-swift-storage-0\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.332618 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qvht\" (UniqueName: \"kubernetes.io/projected/1a785b98-dc58-469e-b1fd-4dba03f79d63-kube-api-access-8qvht\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.333499 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-config\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.333500 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-sb\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.333566 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-svc\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.336297 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-swift-storage-0\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.336409 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-nb\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.339893 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5d455558d5-f58qc"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.353794 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp7qv\" (UniqueName: \"kubernetes.io/projected/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-kube-api-access-sp7qv\") pod \"dnsmasq-dns-66cdd4b5b5-z7rpl\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") " pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.399130 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.437721 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qvht\" (UniqueName: \"kubernetes.io/projected/1a785b98-dc58-469e-b1fd-4dba03f79d63-kube-api-access-8qvht\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.437825 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a785b98-dc58-469e-b1fd-4dba03f79d63-logs\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.437934 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.437970 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-combined-ca-bundle\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.438056 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data-custom\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.439395 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a785b98-dc58-469e-b1fd-4dba03f79d63-logs\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.442396 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-combined-ca-bundle\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.448340 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data-custom\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.448523 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.454556 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qvht\" (UniqueName: \"kubernetes.io/projected/1a785b98-dc58-469e-b1fd-4dba03f79d63-kube-api-access-8qvht\") pod \"barbican-api-69bb8d8f84-sdsn4\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.514503 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.579477 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.719934 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"235041d8-b61c-4f18-b352-c07ae69e5c49","Type":"ContainerStarted","Data":"713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542"}
Jan 04 12:10:10 crc kubenswrapper[5003]: I0104 12:10:10.831108 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5d455558d5-f58qc"]
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.048749 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-644b6c944d-sd84t"]
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.182775 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"]
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.310435 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-69bb8d8f84-sdsn4"]
Jan 04 12:10:11 crc kubenswrapper[5003]: W0104 12:10:11.318568 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a785b98_dc58_469e_b1fd_4dba03f79d63.slice/crio-0469edb717d134607e05a2e5ebaf68614da6112ae9c5a65082ff579ed0d27c1d WatchSource:0}: Error finding container 0469edb717d134607e05a2e5ebaf68614da6112ae9c5a65082ff579ed0d27c1d: Status 404 returned error can't find the container with id 0469edb717d134607e05a2e5ebaf68614da6112ae9c5a65082ff579ed0d27c1d
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.342773 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-2zm85"
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.476277 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b47fe1da-877a-4d5e-a21b-c7955bd00b30-etc-machine-id\") pod \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") "
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.476400 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-combined-ca-bundle\") pod \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") "
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.476551 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-db-sync-config-data\") pod \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") "
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.476613 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-scripts\") pod \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") "
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.476768 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-config-data\") pod \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") "
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.477160 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trhwh\" (UniqueName: \"kubernetes.io/projected/b47fe1da-877a-4d5e-a21b-c7955bd00b30-kube-api-access-trhwh\") pod \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\" (UID: \"b47fe1da-877a-4d5e-a21b-c7955bd00b30\") "
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.477636 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b47fe1da-877a-4d5e-a21b-c7955bd00b30-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b47fe1da-877a-4d5e-a21b-c7955bd00b30" (UID: "b47fe1da-877a-4d5e-a21b-c7955bd00b30"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.478334 5003 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b47fe1da-877a-4d5e-a21b-c7955bd00b30-etc-machine-id\") on node \"crc\" DevicePath \"\""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.483488 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-scripts" (OuterVolumeSpecName: "scripts") pod "b47fe1da-877a-4d5e-a21b-c7955bd00b30" (UID: "b47fe1da-877a-4d5e-a21b-c7955bd00b30"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.483517 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b47fe1da-877a-4d5e-a21b-c7955bd00b30-kube-api-access-trhwh" (OuterVolumeSpecName: "kube-api-access-trhwh") pod "b47fe1da-877a-4d5e-a21b-c7955bd00b30" (UID: "b47fe1da-877a-4d5e-a21b-c7955bd00b30"). InnerVolumeSpecName "kube-api-access-trhwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.483758 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b47fe1da-877a-4d5e-a21b-c7955bd00b30" (UID: "b47fe1da-877a-4d5e-a21b-c7955bd00b30"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.516845 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b47fe1da-877a-4d5e-a21b-c7955bd00b30" (UID: "b47fe1da-877a-4d5e-a21b-c7955bd00b30"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.541927 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-config-data" (OuterVolumeSpecName: "config-data") pod "b47fe1da-877a-4d5e-a21b-c7955bd00b30" (UID: "b47fe1da-877a-4d5e-a21b-c7955bd00b30"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.579811 5003 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.579846 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.579855 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.579863 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trhwh\" (UniqueName: \"kubernetes.io/projected/b47fe1da-877a-4d5e-a21b-c7955bd00b30-kube-api-access-trhwh\") on node \"crc\" DevicePath \"\""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.579873 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b47fe1da-877a-4d5e-a21b-c7955bd00b30-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.734254 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"235041d8-b61c-4f18-b352-c07ae69e5c49","Type":"ContainerStarted","Data":"8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2"}
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.736474 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d455558d5-f58qc" event={"ID":"f0dcef7e-0621-4399-b967-5d5f90dd695f","Type":"ContainerStarted","Data":"1aa3d5b8500289d4b0c90a30cf8bafcab60eff06444efc230bf3add825a653d0"}
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.741924 5003 generic.go:334] "Generic (PLEG): container finished" podID="2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" containerID="d5ee02e8789ea45820be9e25fe271db2a857486e82f782c830a592e23709ac31" exitCode=0
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.741997 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl" event={"ID":"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9","Type":"ContainerDied","Data":"d5ee02e8789ea45820be9e25fe271db2a857486e82f782c830a592e23709ac31"}
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.742049 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl" event={"ID":"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9","Type":"ContainerStarted","Data":"535bbe4de27176bdd8077e7ff54de8d481b039b886d1bcac13d1b48ec97ae0d3"}
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.746971 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-2zm85" event={"ID":"b47fe1da-877a-4d5e-a21b-c7955bd00b30","Type":"ContainerDied","Data":"782d0879abd3c41319ef1fbae968d0335eec61a1791349e968383a980dff9f56"}
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.747038 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="782d0879abd3c41319ef1fbae968d0335eec61a1791349e968383a980dff9f56"
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.747125 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-2zm85"
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.748346 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t" event={"ID":"a29676ba-4d56-4b2e-a92f-c83b5f25345a","Type":"ContainerStarted","Data":"2ef4f78a1357e56fbf5d7f6e592a25c5326254b62061d4c1bf5e37cf68190465"}
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.763580 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69bb8d8f84-sdsn4" event={"ID":"1a785b98-dc58-469e-b1fd-4dba03f79d63","Type":"ContainerStarted","Data":"e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837"}
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.763634 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69bb8d8f84-sdsn4" event={"ID":"1a785b98-dc58-469e-b1fd-4dba03f79d63","Type":"ContainerStarted","Data":"6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59"}
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.763645 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69bb8d8f84-sdsn4" event={"ID":"1a785b98-dc58-469e-b1fd-4dba03f79d63","Type":"ContainerStarted","Data":"0469edb717d134607e05a2e5ebaf68614da6112ae9c5a65082ff579ed0d27c1d"}
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.764047 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:11 crc kubenswrapper[5003]: I0104 12:10:11.764721 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-69bb8d8f84-sdsn4"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.050071 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-69bb8d8f84-sdsn4" podStartSLOduration=2.040994882 podStartE2EDuration="2.040994882s" podCreationTimestamp="2026-01-04 12:10:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:11.803718731 +0000 UTC m=+1327.276748572" watchObservedRunningTime="2026-01-04 12:10:12.040994882 +0000 UTC m=+1327.514024723"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.050659 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"]
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.064285 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 04 12:10:12 crc kubenswrapper[5003]: E0104 12:10:12.064796 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b47fe1da-877a-4d5e-a21b-c7955bd00b30" containerName="cinder-db-sync"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.064813 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b47fe1da-877a-4d5e-a21b-c7955bd00b30" containerName="cinder-db-sync"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.065075 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b47fe1da-877a-4d5e-a21b-c7955bd00b30" containerName="cinder-db-sync"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.066107 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.077671 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-5v97r"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.077981 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.078110 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.078255 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.088695 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-qzckq"]
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.096096 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.097997 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.126983 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-qzckq"]
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.199867 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.199931 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-nb\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.199951 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.199969 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.199987 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-sb\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.200041 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zc5rg\" (UniqueName: \"kubernetes.io/projected/0f8f3494-b6ed-4200-9e60-32d895c76f6a-kube-api-access-zc5rg\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.200073 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-scripts\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.200098 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-swift-storage-0\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.200140 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-svc\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.200177 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k252b\" (UniqueName: \"kubernetes.io/projected/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-kube-api-access-k252b\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.200214 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.200234 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-config\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.294281 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.295863 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.299294 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304097 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k252b\" (UniqueName: \"kubernetes.io/projected/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-kube-api-access-k252b\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304169 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304200 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-config\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304246 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304279 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-nb\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304305 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304323 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304344 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-sb\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304389 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zc5rg\" (UniqueName: \"kubernetes.io/projected/0f8f3494-b6ed-4200-9e60-32d895c76f6a-kube-api-access-zc5rg\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304416 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-scripts\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304445 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-swift-storage-0\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.304492 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-svc\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.305523 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-svc\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.321305 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-sb\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.321380 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.321447 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-swift-storage-0\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.321983 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-config\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.322082 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-nb\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.322353 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.325745 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.328765 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.329857 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-scripts\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.330523 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.347569 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zc5rg\" (UniqueName: \"kubernetes.io/projected/0f8f3494-b6ed-4200-9e60-32d895c76f6a-kube-api-access-zc5rg\") pod \"dnsmasq-dns-75dbb546bf-qzckq\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.357687 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k252b\" (UniqueName: \"kubernetes.io/projected/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-kube-api-access-k252b\") pod \"cinder-scheduler-0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.407436 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.407513 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlmlz\" (UniqueName: \"kubernetes.io/projected/32d41721-ce54-4695-930a-d66c8b2b2050-kube-api-access-zlmlz\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.407545 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.407621 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/32d41721-ce54-4695-930a-d66c8b2b2050-etc-machine-id\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.407678 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-scripts\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.407700 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data-custom\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.407734 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d41721-ce54-4695-930a-d66c8b2b2050-logs\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.423850 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.434446 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.509079 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data-custom\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.509151 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d41721-ce54-4695-930a-d66c8b2b2050-logs\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.509189 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.509229 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlmlz\" (UniqueName: \"kubernetes.io/projected/32d41721-ce54-4695-930a-d66c8b2b2050-kube-api-access-zlmlz\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.509257 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.509300 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/32d41721-ce54-4695-930a-d66c8b2b2050-etc-machine-id\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.509352 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-scripts\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.510391 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/32d41721-ce54-4695-930a-d66c8b2b2050-etc-machine-id\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.510753 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d41721-ce54-4695-930a-d66c8b2b2050-logs\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.517413 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-scripts\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.517695 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data-custom\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.517951 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.527317 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.528399 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlmlz\" (UniqueName: \"kubernetes.io/projected/32d41721-ce54-4695-930a-d66c8b2b2050-kube-api-access-zlmlz\") pod \"cinder-api-0\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " pod="openstack/cinder-api-0"
Jan 04 12:10:12 crc kubenswrapper[5003]: I0104 12:10:12.719044 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.701159 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.799093 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t" event={"ID":"a29676ba-4d56-4b2e-a92f-c83b5f25345a","Type":"ContainerStarted","Data":"f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478"}
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.802594 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"235041d8-b61c-4f18-b352-c07ae69e5c49","Type":"ContainerStarted","Data":"62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065"}
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.803509 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.813735 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0eb0eeca-c60e-42b8-8df8-6560b563d9c0","Type":"ContainerStarted","Data":"9c9350b6f3914f073ac5750de7670d6ca7d945ec13dbfad157d4c9ba724288a8"}
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.821314 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d455558d5-f58qc" event={"ID":"f0dcef7e-0621-4399-b967-5d5f90dd695f","Type":"ContainerStarted","Data":"b3d17d863faaeeeaa347265306da2ca818931952ebab5c074c74f4eaf33efddb"}
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.835963 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl" event={"ID":"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9","Type":"ContainerStarted","Data":"49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f"}
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.836272 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl" podUID="2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" containerName="dnsmasq-dns" containerID="cri-o://49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f" gracePeriod=10
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.836380 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.861212 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.138288629 podStartE2EDuration="6.861187603s" podCreationTimestamp="2026-01-04 12:10:07 +0000 UTC" firstStartedPulling="2026-01-04 12:10:08.56629432 +0000 UTC m=+1324.039324151" lastFinishedPulling="2026-01-04 12:10:13.289193284 +0000 UTC m=+1328.762223125" observedRunningTime="2026-01-04 12:10:13.833840938 +0000 UTC m=+1329.306870789" watchObservedRunningTime="2026-01-04 12:10:13.861187603 +0000 UTC m=+1329.334217444"
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.866722 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-qzckq"]
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.870671 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl" podStartSLOduration=3.87064813 podStartE2EDuration="3.87064813s" podCreationTimestamp="2026-01-04 12:10:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:13.862781745 +0000 UTC m=+1329.335811596" watchObservedRunningTime="2026-01-04 12:10:13.87064813 +0000 UTC m=+1329.343677971"
Jan 04 12:10:13 crc kubenswrapper[5003]: W0104 12:10:13.879865 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f8f3494_b6ed_4200_9e60_32d895c76f6a.slice/crio-eda91c171cc29023d65503c9cd17383f2854f0efb01135c55d84d1c70f720e43 WatchSource:0}: Error finding container eda91c171cc29023d65503c9cd17383f2854f0efb01135c55d84d1c70f720e43: Status 404 returned error can't find the container with id eda91c171cc29023d65503c9cd17383f2854f0efb01135c55d84d1c70f720e43
Jan 04 12:10:13 crc kubenswrapper[5003]: W0104 12:10:13.889162 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32d41721_ce54_4695_930a_d66c8b2b2050.slice/crio-c16e5b7bb0bad79c9d561c2443c7e32eb07c41a211156d0ffef5683462c495e0 WatchSource:0}: Error finding container c16e5b7bb0bad79c9d561c2443c7e32eb07c41a211156d0ffef5683462c495e0: Status 404 returned error can't find the container with id c16e5b7bb0bad79c9d561c2443c7e32eb07c41a211156d0ffef5683462c495e0
Jan 04 12:10:13 crc kubenswrapper[5003]: I0104 12:10:13.889991 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.550216 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.659550 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-config\") pod \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") "
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.659640 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-nb\") pod \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") "
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.659778 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sp7qv\" (UniqueName: \"kubernetes.io/projected/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-kube-api-access-sp7qv\") pod \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") "
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.659806 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-svc\") pod \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") "
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.659907 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-sb\") pod \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") "
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.659938 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-swift-storage-0\") pod \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\" (UID: \"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9\") "
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.669163 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-kube-api-access-sp7qv" (OuterVolumeSpecName: "kube-api-access-sp7qv") pod "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" (UID: "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9"). InnerVolumeSpecName "kube-api-access-sp7qv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.722340 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" (UID: "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.730472 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" (UID: "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.732666 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" (UID: "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.757820 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" (UID: "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9"). InnerVolumeSpecName "ovsdbserver-nb".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.762617 5003 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.762650 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.762666 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sp7qv\" (UniqueName: \"kubernetes.io/projected/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-kube-api-access-sp7qv\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.762678 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.762687 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.801595 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-config" (OuterVolumeSpecName: "config") pod "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" (UID: "2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.859222 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t" event={"ID":"a29676ba-4d56-4b2e-a92f-c83b5f25345a","Type":"ContainerStarted","Data":"450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4"} Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.866173 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.890195 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"32d41721-ce54-4695-930a-d66c8b2b2050","Type":"ContainerStarted","Data":"b0e2e827d90c1a68f1e290b06388bc0b3db1d393defb3f0ecc4c58d53ee4cd9a"} Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.890242 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"32d41721-ce54-4695-930a-d66c8b2b2050","Type":"ContainerStarted","Data":"c16e5b7bb0bad79c9d561c2443c7e32eb07c41a211156d0ffef5683462c495e0"} Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.893899 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d455558d5-f58qc" event={"ID":"f0dcef7e-0621-4399-b967-5d5f90dd695f","Type":"ContainerStarted","Data":"d3cbe354dea063f86f1a93a83f133720e47dd24a58d27a2d2bdf3cd839088357"} Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.908275 5003 generic.go:334] "Generic (PLEG): container finished" podID="2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" 
containerID="49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f" exitCode=0 Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.908391 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl" event={"ID":"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9","Type":"ContainerDied","Data":"49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f"} Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.908425 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl" event={"ID":"2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9","Type":"ContainerDied","Data":"535bbe4de27176bdd8077e7ff54de8d481b039b886d1bcac13d1b48ec97ae0d3"} Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.908443 5003 scope.go:117] "RemoveContainer" containerID="49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f" Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.908632 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66cdd4b5b5-z7rpl" Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.932574 5003 generic.go:334] "Generic (PLEG): container finished" podID="0f8f3494-b6ed-4200-9e60-32d895c76f6a" containerID="166764e8d75c8ee2f0445664874964a0d41198a944591c07ed1ccd2289360373" exitCode=0 Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.934133 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq" event={"ID":"0f8f3494-b6ed-4200-9e60-32d895c76f6a","Type":"ContainerDied","Data":"166764e8d75c8ee2f0445664874964a0d41198a944591c07ed1ccd2289360373"} Jan 04 12:10:14 crc kubenswrapper[5003]: I0104 12:10:14.934169 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq" event={"ID":"0f8f3494-b6ed-4200-9e60-32d895c76f6a","Type":"ContainerStarted","Data":"eda91c171cc29023d65503c9cd17383f2854f0efb01135c55d84d1c70f720e43"} Jan 04 12:10:15 crc kubenswrapper[5003]: I0104 12:10:15.025765 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"] Jan 04 12:10:15 crc kubenswrapper[5003]: I0104 12:10:15.068299 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66cdd4b5b5-z7rpl"] Jan 04 12:10:15 crc kubenswrapper[5003]: I0104 12:10:15.085681 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t" podStartSLOduration=2.84901268 podStartE2EDuration="5.085656225s" podCreationTimestamp="2026-01-04 12:10:10 +0000 UTC" firstStartedPulling="2026-01-04 12:10:11.053204736 +0000 UTC m=+1326.526234577" lastFinishedPulling="2026-01-04 12:10:13.289848281 +0000 UTC m=+1328.762878122" observedRunningTime="2026-01-04 12:10:14.983129926 +0000 UTC m=+1330.456159787" watchObservedRunningTime="2026-01-04 12:10:15.085656225 +0000 UTC m=+1330.558686076" Jan 04 12:10:15 crc kubenswrapper[5003]: I0104 12:10:15.104173 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5d455558d5-f58qc" podStartSLOduration=3.607944111 podStartE2EDuration="6.104150819s" podCreationTimestamp="2026-01-04 12:10:09 +0000 UTC" firstStartedPulling="2026-01-04 12:10:10.833956296 +0000 UTC m=+1326.306986137" lastFinishedPulling="2026-01-04 12:10:13.330163004 +0000 UTC m=+1328.803192845" observedRunningTime="2026-01-04 12:10:15.036815969 +0000 UTC m=+1330.509845810" watchObservedRunningTime="2026-01-04 12:10:15.104150819 +0000 UTC 
m=+1330.577180680" Jan 04 12:10:15 crc kubenswrapper[5003]: I0104 12:10:15.173918 5003 scope.go:117] "RemoveContainer" containerID="d5ee02e8789ea45820be9e25fe271db2a857486e82f782c830a592e23709ac31" Jan 04 12:10:15 crc kubenswrapper[5003]: I0104 12:10:15.219205 5003 scope.go:117] "RemoveContainer" containerID="49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f" Jan 04 12:10:15 crc kubenswrapper[5003]: E0104 12:10:15.219679 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f\": container with ID starting with 49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f not found: ID does not exist" containerID="49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f" Jan 04 12:10:15 crc kubenswrapper[5003]: I0104 12:10:15.219725 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f"} err="failed to get container status \"49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f\": rpc error: code = NotFound desc = could not find container \"49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f\": container with ID starting with 49f2a834542a3269343281e2f33c5f2ca7906f475027d0a6f3b0417b9474338f not found: ID does not exist" Jan 04 12:10:15 crc kubenswrapper[5003]: I0104 12:10:15.219749 5003 scope.go:117] "RemoveContainer" containerID="d5ee02e8789ea45820be9e25fe271db2a857486e82f782c830a592e23709ac31" Jan 04 12:10:15 crc kubenswrapper[5003]: E0104 12:10:15.219987 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5ee02e8789ea45820be9e25fe271db2a857486e82f782c830a592e23709ac31\": container with ID starting with d5ee02e8789ea45820be9e25fe271db2a857486e82f782c830a592e23709ac31 not found: ID does not exist" containerID="d5ee02e8789ea45820be9e25fe271db2a857486e82f782c830a592e23709ac31" Jan 04 12:10:15 crc kubenswrapper[5003]: I0104 12:10:15.220004 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5ee02e8789ea45820be9e25fe271db2a857486e82f782c830a592e23709ac31"} err="failed to get container status \"d5ee02e8789ea45820be9e25fe271db2a857486e82f782c830a592e23709ac31\": rpc error: code = NotFound desc = could not find container \"d5ee02e8789ea45820be9e25fe271db2a857486e82f782c830a592e23709ac31\": container with ID starting with d5ee02e8789ea45820be9e25fe271db2a857486e82f782c830a592e23709ac31 not found: ID does not exist" Jan 04 12:10:15 crc kubenswrapper[5003]: I0104 12:10:15.996807 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"32d41721-ce54-4695-930a-d66c8b2b2050","Type":"ContainerStarted","Data":"7dce8e8b75190a2282e82e701a7d7156f5b6e9647002a66eb3985aee6cf5d650"} Jan 04 12:10:15 crc kubenswrapper[5003]: I0104 12:10:15.997886 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 04 12:10:16 crc kubenswrapper[5003]: I0104 12:10:16.016853 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0eb0eeca-c60e-42b8-8df8-6560b563d9c0","Type":"ContainerStarted","Data":"756f6499103740d47f9aabe90ee51e6d5537669adb0c428cee62f6f6a56bb89d"} Jan 04 12:10:16 crc kubenswrapper[5003]: I0104 12:10:16.030849 5003 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.030807077 podStartE2EDuration="4.030807077s" podCreationTimestamp="2026-01-04 12:10:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:16.022297115 +0000 UTC m=+1331.495326976" watchObservedRunningTime="2026-01-04 12:10:16.030807077 +0000 UTC m=+1331.503836918" Jan 04 12:10:16 crc kubenswrapper[5003]: I0104 12:10:16.033629 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq" event={"ID":"0f8f3494-b6ed-4200-9e60-32d895c76f6a","Type":"ContainerStarted","Data":"de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c"} Jan 04 12:10:16 crc kubenswrapper[5003]: I0104 12:10:16.034527 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq" Jan 04 12:10:16 crc kubenswrapper[5003]: I0104 12:10:16.078104 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq" podStartSLOduration=4.078082923 podStartE2EDuration="4.078082923s" podCreationTimestamp="2026-01-04 12:10:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:16.064849227 +0000 UTC m=+1331.537879078" watchObservedRunningTime="2026-01-04 12:10:16.078082923 +0000 UTC m=+1331.551112764" Jan 04 12:10:16 crc kubenswrapper[5003]: I0104 12:10:16.333714 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:10:16 crc kubenswrapper[5003]: I0104 12:10:16.817805 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" path="/var/lib/kubelet/pods/2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9/volumes" Jan 04 12:10:17 crc kubenswrapper[5003]: I0104 12:10:17.042991 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0eb0eeca-c60e-42b8-8df8-6560b563d9c0","Type":"ContainerStarted","Data":"64c858dda652466a27ba0ec415a77aa142ff2e4c87f934d78cf0eda08d5cc0db"} Jan 04 12:10:17 crc kubenswrapper[5003]: I0104 12:10:17.072209 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.310325051 podStartE2EDuration="5.072184303s" podCreationTimestamp="2026-01-04 12:10:12 +0000 UTC" firstStartedPulling="2026-01-04 12:10:13.743111787 +0000 UTC m=+1329.216141628" lastFinishedPulling="2026-01-04 12:10:14.504971039 +0000 UTC m=+1329.978000880" observedRunningTime="2026-01-04 12:10:17.063153307 +0000 UTC m=+1332.536183148" watchObservedRunningTime="2026-01-04 12:10:17.072184303 +0000 UTC m=+1332.545214144" Jan 04 12:10:17 crc kubenswrapper[5003]: I0104 12:10:17.424447 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.052917 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="32d41721-ce54-4695-930a-d66c8b2b2050" containerName="cinder-api-log" containerID="cri-o://b0e2e827d90c1a68f1e290b06388bc0b3db1d393defb3f0ecc4c58d53ee4cd9a" gracePeriod=30 Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.053031 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="32d41721-ce54-4695-930a-d66c8b2b2050" 
containerName="cinder-api" containerID="cri-o://7dce8e8b75190a2282e82e701a7d7156f5b6e9647002a66eb3985aee6cf5d650" gracePeriod=30 Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.454585 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-66fdccd748-zk2qt"] Jan 04 12:10:18 crc kubenswrapper[5003]: E0104 12:10:18.454971 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" containerName="dnsmasq-dns" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.454990 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" containerName="dnsmasq-dns" Jan 04 12:10:18 crc kubenswrapper[5003]: E0104 12:10:18.455037 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" containerName="init" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.455046 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" containerName="init" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.455223 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f4ff7e1-decf-4c17-bb79-f9b2f0d86bb9" containerName="dnsmasq-dns" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.456389 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.459100 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.459181 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.474415 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-66fdccd748-zk2qt"] Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.541314 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-internal-tls-certs\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.541699 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-logs\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.541723 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-combined-ca-bundle\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.541808 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data-custom\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " 
pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.541834 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb4ms\" (UniqueName: \"kubernetes.io/projected/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-kube-api-access-mb4ms\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.541855 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.541895 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-public-tls-certs\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.643326 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-public-tls-certs\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.643388 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-internal-tls-certs\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.643428 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-combined-ca-bundle\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.643448 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-logs\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.643532 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data-custom\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.643552 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb4ms\" (UniqueName: \"kubernetes.io/projected/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-kube-api-access-mb4ms\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: 
\"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.643569 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.646113 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-logs\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.654426 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.655710 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-internal-tls-certs\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.658461 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-public-tls-certs\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.661373 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data-custom\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.662232 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-combined-ca-bundle\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.667374 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb4ms\" (UniqueName: \"kubernetes.io/projected/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-kube-api-access-mb4ms\") pod \"barbican-api-66fdccd748-zk2qt\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:18 crc kubenswrapper[5003]: I0104 12:10:18.779863 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.067513 5003 generic.go:334] "Generic (PLEG): container finished" podID="32d41721-ce54-4695-930a-d66c8b2b2050" containerID="7dce8e8b75190a2282e82e701a7d7156f5b6e9647002a66eb3985aee6cf5d650" exitCode=0 Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.067960 5003 generic.go:334] "Generic (PLEG): container finished" podID="32d41721-ce54-4695-930a-d66c8b2b2050" containerID="b0e2e827d90c1a68f1e290b06388bc0b3db1d393defb3f0ecc4c58d53ee4cd9a" exitCode=143 Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.069125 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"32d41721-ce54-4695-930a-d66c8b2b2050","Type":"ContainerDied","Data":"7dce8e8b75190a2282e82e701a7d7156f5b6e9647002a66eb3985aee6cf5d650"} Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.069189 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"32d41721-ce54-4695-930a-d66c8b2b2050","Type":"ContainerDied","Data":"b0e2e827d90c1a68f1e290b06388bc0b3db1d393defb3f0ecc4c58d53ee4cd9a"} Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.284907 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-66fdccd748-zk2qt"] Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.499229 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.576226 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data\") pod \"32d41721-ce54-4695-930a-d66c8b2b2050\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.576409 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlmlz\" (UniqueName: \"kubernetes.io/projected/32d41721-ce54-4695-930a-d66c8b2b2050-kube-api-access-zlmlz\") pod \"32d41721-ce54-4695-930a-d66c8b2b2050\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.576530 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-scripts\") pod \"32d41721-ce54-4695-930a-d66c8b2b2050\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.576568 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-combined-ca-bundle\") pod \"32d41721-ce54-4695-930a-d66c8b2b2050\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.576731 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data-custom\") pod \"32d41721-ce54-4695-930a-d66c8b2b2050\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.576975 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/32d41721-ce54-4695-930a-d66c8b2b2050-etc-machine-id\") pod 
\"32d41721-ce54-4695-930a-d66c8b2b2050\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.577309 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d41721-ce54-4695-930a-d66c8b2b2050-logs\") pod \"32d41721-ce54-4695-930a-d66c8b2b2050\" (UID: \"32d41721-ce54-4695-930a-d66c8b2b2050\") " Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.577741 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32d41721-ce54-4695-930a-d66c8b2b2050-logs" (OuterVolumeSpecName: "logs") pod "32d41721-ce54-4695-930a-d66c8b2b2050" (UID: "32d41721-ce54-4695-930a-d66c8b2b2050"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.577813 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/32d41721-ce54-4695-930a-d66c8b2b2050-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "32d41721-ce54-4695-930a-d66c8b2b2050" (UID: "32d41721-ce54-4695-930a-d66c8b2b2050"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.582712 5003 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/32d41721-ce54-4695-930a-d66c8b2b2050-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.582748 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d41721-ce54-4695-930a-d66c8b2b2050-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.589174 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "32d41721-ce54-4695-930a-d66c8b2b2050" (UID: "32d41721-ce54-4695-930a-d66c8b2b2050"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.608904 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-scripts" (OuterVolumeSpecName: "scripts") pod "32d41721-ce54-4695-930a-d66c8b2b2050" (UID: "32d41721-ce54-4695-930a-d66c8b2b2050"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.613367 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32d41721-ce54-4695-930a-d66c8b2b2050-kube-api-access-zlmlz" (OuterVolumeSpecName: "kube-api-access-zlmlz") pod "32d41721-ce54-4695-930a-d66c8b2b2050" (UID: "32d41721-ce54-4695-930a-d66c8b2b2050"). InnerVolumeSpecName "kube-api-access-zlmlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.621479 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32d41721-ce54-4695-930a-d66c8b2b2050" (UID: "32d41721-ce54-4695-930a-d66c8b2b2050"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.632420 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data" (OuterVolumeSpecName: "config-data") pod "32d41721-ce54-4695-930a-d66c8b2b2050" (UID: "32d41721-ce54-4695-930a-d66c8b2b2050"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.690398 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.691251 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlmlz\" (UniqueName: \"kubernetes.io/projected/32d41721-ce54-4695-930a-d66c8b2b2050-kube-api-access-zlmlz\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.691424 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.692214 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:19 crc kubenswrapper[5003]: I0104 12:10:19.692334 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/32d41721-ce54-4695-930a-d66c8b2b2050-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.079916 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"32d41721-ce54-4695-930a-d66c8b2b2050","Type":"ContainerDied","Data":"c16e5b7bb0bad79c9d561c2443c7e32eb07c41a211156d0ffef5683462c495e0"} Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.079996 5003 scope.go:117] "RemoveContainer" containerID="7dce8e8b75190a2282e82e701a7d7156f5b6e9647002a66eb3985aee6cf5d650" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.081769 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.082121 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66fdccd748-zk2qt" event={"ID":"b53805c6-4e15-4580-a60d-1f0c9c1fcef6","Type":"ContainerStarted","Data":"dd895a8362663704ae95aaa9cadf69c10b404ecae29d602906e53e472c16265f"} Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.082169 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66fdccd748-zk2qt" event={"ID":"b53805c6-4e15-4580-a60d-1f0c9c1fcef6","Type":"ContainerStarted","Data":"8100c888907e0c359672d9aa57c58750ef202eac34f6410af86c42eefa66cc49"} Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.082211 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66fdccd748-zk2qt" event={"ID":"b53805c6-4e15-4580-a60d-1f0c9c1fcef6","Type":"ContainerStarted","Data":"7ad830d8390930ebcfe985abb2a6cc13a55e16f03367ecdb17cdf43794420ce8"} Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.082444 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.109477 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-66fdccd748-zk2qt" podStartSLOduration=2.109454793 podStartE2EDuration="2.109454793s" podCreationTimestamp="2026-01-04 12:10:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:20.108993091 +0000 UTC m=+1335.582022932" watchObservedRunningTime="2026-01-04 12:10:20.109454793 +0000 UTC m=+1335.582484634" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.112041 5003 scope.go:117] "RemoveContainer" containerID="b0e2e827d90c1a68f1e290b06388bc0b3db1d393defb3f0ecc4c58d53ee4cd9a" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.163089 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.189096 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.233902 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:10:20 crc kubenswrapper[5003]: E0104 12:10:20.234372 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d41721-ce54-4695-930a-d66c8b2b2050" containerName="cinder-api-log" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.234390 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d41721-ce54-4695-930a-d66c8b2b2050" containerName="cinder-api-log" Jan 04 12:10:20 crc kubenswrapper[5003]: E0104 12:10:20.234401 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d41721-ce54-4695-930a-d66c8b2b2050" containerName="cinder-api" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.234408 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d41721-ce54-4695-930a-d66c8b2b2050" containerName="cinder-api" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.234581 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="32d41721-ce54-4695-930a-d66c8b2b2050" containerName="cinder-api-log" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.234602 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="32d41721-ce54-4695-930a-d66c8b2b2050" containerName="cinder-api" 
Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.235577 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.237990 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.238505 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.238690 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.242610 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.310518 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.310576 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.310609 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95w2v\" (UniqueName: \"kubernetes.io/projected/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-kube-api-access-95w2v\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.310633 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.311127 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.311259 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data-custom\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.311277 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-logs\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.311369 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.311388 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-scripts\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.413379 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.414249 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.414281 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95w2v\" (UniqueName: \"kubernetes.io/projected/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-kube-api-access-95w2v\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.414303 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.414385 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.414429 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data-custom\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.414445 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-logs\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.414501 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 
12:10:20.414528 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-scripts\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.415334 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-logs\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.415466 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.419118 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.419397 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.419727 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-scripts\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.419946 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data-custom\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.424714 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.433371 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.436972 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95w2v\" (UniqueName: \"kubernetes.io/projected/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-kube-api-access-95w2v\") pod \"cinder-api-0\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.563145 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Jan 04 12:10:20 crc kubenswrapper[5003]: I0104 12:10:20.820933 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32d41721-ce54-4695-930a-d66c8b2b2050" path="/var/lib/kubelet/pods/32d41721-ce54-4695-930a-d66c8b2b2050/volumes" Jan 04 12:10:21 crc kubenswrapper[5003]: I0104 12:10:21.062690 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:10:21 crc kubenswrapper[5003]: I0104 12:10:21.106872 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2c899132-ee49-4ab3-89ea-95f0bfcb71ab","Type":"ContainerStarted","Data":"e7f8d7fa6a185457322a4d262490f9d28e3431b5d35bf629a605517ded7a3222"} Jan 04 12:10:21 crc kubenswrapper[5003]: I0104 12:10:21.109448 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:21 crc kubenswrapper[5003]: I0104 12:10:21.957880 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-69bb8d8f84-sdsn4" Jan 04 12:10:22 crc kubenswrapper[5003]: I0104 12:10:22.133155 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2c899132-ee49-4ab3-89ea-95f0bfcb71ab","Type":"ContainerStarted","Data":"8dc168dd5d62f051b24906056242c3db9d4e7a18cca4bd69532834764b2e4b47"} Jan 04 12:10:22 crc kubenswrapper[5003]: I0104 12:10:22.145220 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-69bb8d8f84-sdsn4" Jan 04 12:10:22 crc kubenswrapper[5003]: I0104 12:10:22.437497 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq" Jan 04 12:10:22 crc kubenswrapper[5003]: I0104 12:10:22.508239 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-685444497c-wclfh"] Jan 04 12:10:22 crc kubenswrapper[5003]: I0104 12:10:22.508558 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-685444497c-wclfh" podUID="6b768613-0d91-4d90-a07f-7d2cdb33c3e3" containerName="dnsmasq-dns" containerID="cri-o://b60119d81be9fc4f99fe8bd185446cae74b10c1f7ded466e547d20803cd8d95d" gracePeriod=10 Jan 04 12:10:22 crc kubenswrapper[5003]: I0104 12:10:22.796998 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-685444497c-wclfh" podUID="6b768613-0d91-4d90-a07f-7d2cdb33c3e3" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.154:5353: connect: connection refused" Jan 04 12:10:22 crc kubenswrapper[5003]: I0104 12:10:22.805580 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 04 12:10:22 crc kubenswrapper[5003]: I0104 12:10:22.974910 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.028547 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.150052 5003 generic.go:334] "Generic (PLEG): container finished" podID="6b768613-0d91-4d90-a07f-7d2cdb33c3e3" containerID="b60119d81be9fc4f99fe8bd185446cae74b10c1f7ded466e547d20803cd8d95d" exitCode=0 Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.150114 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-685444497c-wclfh" 
event={"ID":"6b768613-0d91-4d90-a07f-7d2cdb33c3e3","Type":"ContainerDied","Data":"b60119d81be9fc4f99fe8bd185446cae74b10c1f7ded466e547d20803cd8d95d"} Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.150148 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-685444497c-wclfh" event={"ID":"6b768613-0d91-4d90-a07f-7d2cdb33c3e3","Type":"ContainerDied","Data":"a4e8516a8d916db0cf7b58ee45498da76a7c5a1d0b05dc54ae295c6f12e2ed12"} Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.150162 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4e8516a8d916db0cf7b58ee45498da76a7c5a1d0b05dc54ae295c6f12e2ed12" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.151891 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="0eb0eeca-c60e-42b8-8df8-6560b563d9c0" containerName="cinder-scheduler" containerID="cri-o://756f6499103740d47f9aabe90ee51e6d5537669adb0c428cee62f6f6a56bb89d" gracePeriod=30 Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.152225 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="0eb0eeca-c60e-42b8-8df8-6560b563d9c0" containerName="probe" containerID="cri-o://64c858dda652466a27ba0ec415a77aa142ff2e4c87f934d78cf0eda08d5cc0db" gracePeriod=30 Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.152371 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2c899132-ee49-4ab3-89ea-95f0bfcb71ab","Type":"ContainerStarted","Data":"46919056bf4b2db61c4b851726637476ed5f878bddbce873c5a30520c666773e"} Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.152750 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.184252 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.197563 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.197544742 podStartE2EDuration="3.197544742s" podCreationTimestamp="2026-01-04 12:10:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:23.193490536 +0000 UTC m=+1338.666520377" watchObservedRunningTime="2026-01-04 12:10:23.197544742 +0000 UTC m=+1338.670574593" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.309433 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-swift-storage-0\") pod \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.309650 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-svc\") pod \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.309746 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhzwg\" (UniqueName: \"kubernetes.io/projected/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-kube-api-access-mhzwg\") pod \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.309828 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-sb\") pod \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.309873 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-config\") pod \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.309940 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-nb\") pod \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\" (UID: \"6b768613-0d91-4d90-a07f-7d2cdb33c3e3\") " Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.325540 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-kube-api-access-mhzwg" (OuterVolumeSpecName: "kube-api-access-mhzwg") pod "6b768613-0d91-4d90-a07f-7d2cdb33c3e3" (UID: "6b768613-0d91-4d90-a07f-7d2cdb33c3e3"). InnerVolumeSpecName "kube-api-access-mhzwg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.418955 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhzwg\" (UniqueName: \"kubernetes.io/projected/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-kube-api-access-mhzwg\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.428838 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6b768613-0d91-4d90-a07f-7d2cdb33c3e3" (UID: "6b768613-0d91-4d90-a07f-7d2cdb33c3e3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.429510 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6b768613-0d91-4d90-a07f-7d2cdb33c3e3" (UID: "6b768613-0d91-4d90-a07f-7d2cdb33c3e3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.442483 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6b768613-0d91-4d90-a07f-7d2cdb33c3e3" (UID: "6b768613-0d91-4d90-a07f-7d2cdb33c3e3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.452557 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6b768613-0d91-4d90-a07f-7d2cdb33c3e3" (UID: "6b768613-0d91-4d90-a07f-7d2cdb33c3e3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.465591 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-config" (OuterVolumeSpecName: "config") pod "6b768613-0d91-4d90-a07f-7d2cdb33c3e3" (UID: "6b768613-0d91-4d90-a07f-7d2cdb33c3e3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.522386 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.522423 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.522432 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.522445 5003 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:23 crc kubenswrapper[5003]: I0104 12:10:23.522459 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6b768613-0d91-4d90-a07f-7d2cdb33c3e3-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:24 crc kubenswrapper[5003]: I0104 12:10:24.102514 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:10:24 crc kubenswrapper[5003]: I0104 12:10:24.162195 5003 generic.go:334] "Generic (PLEG): container finished" podID="0eb0eeca-c60e-42b8-8df8-6560b563d9c0" containerID="64c858dda652466a27ba0ec415a77aa142ff2e4c87f934d78cf0eda08d5cc0db" exitCode=0 Jan 04 12:10:24 crc kubenswrapper[5003]: I0104 12:10:24.162227 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0eb0eeca-c60e-42b8-8df8-6560b563d9c0","Type":"ContainerDied","Data":"64c858dda652466a27ba0ec415a77aa142ff2e4c87f934d78cf0eda08d5cc0db"} Jan 04 12:10:24 crc kubenswrapper[5003]: I0104 12:10:24.162361 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-685444497c-wclfh" Jan 04 12:10:24 crc kubenswrapper[5003]: I0104 12:10:24.198972 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-685444497c-wclfh"] Jan 04 12:10:24 crc kubenswrapper[5003]: I0104 12:10:24.210714 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-685444497c-wclfh"] Jan 04 12:10:24 crc kubenswrapper[5003]: I0104 12:10:24.232219 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:10:24 crc kubenswrapper[5003]: I0104 12:10:24.818553 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b768613-0d91-4d90-a07f-7d2cdb33c3e3" path="/var/lib/kubelet/pods/6b768613-0d91-4d90-a07f-7d2cdb33c3e3/volumes" Jan 04 12:10:25 crc kubenswrapper[5003]: I0104 12:10:25.238667 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-85dcb76789-v5z7d" Jan 04 12:10:26 crc kubenswrapper[5003]: I0104 12:10:26.838344 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:10:26 crc kubenswrapper[5003]: I0104 12:10:26.916731 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7848f7888d-tzm72"] Jan 04 12:10:26 crc kubenswrapper[5003]: I0104 12:10:26.917422 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7848f7888d-tzm72" podUID="c6779a32-42c6-498d-9968-c98329f48aef" containerName="neutron-api" containerID="cri-o://9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156" gracePeriod=30 Jan 04 12:10:26 crc kubenswrapper[5003]: I0104 12:10:26.917900 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7848f7888d-tzm72" podUID="c6779a32-42c6-498d-9968-c98329f48aef" containerName="neutron-httpd" containerID="cri-o://d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d" gracePeriod=30 Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.218610 5003 generic.go:334] "Generic (PLEG): container finished" podID="0eb0eeca-c60e-42b8-8df8-6560b563d9c0" containerID="756f6499103740d47f9aabe90ee51e6d5537669adb0c428cee62f6f6a56bb89d" exitCode=0 Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.218744 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0eb0eeca-c60e-42b8-8df8-6560b563d9c0","Type":"ContainerDied","Data":"756f6499103740d47f9aabe90ee51e6d5537669adb0c428cee62f6f6a56bb89d"} Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.231149 5003 generic.go:334] "Generic (PLEG): container finished" podID="c6779a32-42c6-498d-9968-c98329f48aef" containerID="d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d" exitCode=0 Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.231201 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7848f7888d-tzm72" event={"ID":"c6779a32-42c6-498d-9968-c98329f48aef","Type":"ContainerDied","Data":"d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d"} Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.766117 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.908269 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k252b\" (UniqueName: \"kubernetes.io/projected/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-kube-api-access-k252b\") pod \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.908445 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-combined-ca-bundle\") pod \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.908468 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-scripts\") pod \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.909627 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-etc-machine-id\") pod \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.909658 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data-custom\") pod \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.909734 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data\") pod \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\" (UID: \"0eb0eeca-c60e-42b8-8df8-6560b563d9c0\") " Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.910800 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0eb0eeca-c60e-42b8-8df8-6560b563d9c0" (UID: "0eb0eeca-c60e-42b8-8df8-6560b563d9c0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.916197 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-scripts" (OuterVolumeSpecName: "scripts") pod "0eb0eeca-c60e-42b8-8df8-6560b563d9c0" (UID: "0eb0eeca-c60e-42b8-8df8-6560b563d9c0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.917141 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-kube-api-access-k252b" (OuterVolumeSpecName: "kube-api-access-k252b") pod "0eb0eeca-c60e-42b8-8df8-6560b563d9c0" (UID: "0eb0eeca-c60e-42b8-8df8-6560b563d9c0"). InnerVolumeSpecName "kube-api-access-k252b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.933244 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0eb0eeca-c60e-42b8-8df8-6560b563d9c0" (UID: "0eb0eeca-c60e-42b8-8df8-6560b563d9c0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:27 crc kubenswrapper[5003]: I0104 12:10:27.985192 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0eb0eeca-c60e-42b8-8df8-6560b563d9c0" (UID: "0eb0eeca-c60e-42b8-8df8-6560b563d9c0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.014184 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k252b\" (UniqueName: \"kubernetes.io/projected/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-kube-api-access-k252b\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.014218 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.014227 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.014236 5003 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.014244 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.027841 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data" (OuterVolumeSpecName: "config-data") pod "0eb0eeca-c60e-42b8-8df8-6560b563d9c0" (UID: "0eb0eeca-c60e-42b8-8df8-6560b563d9c0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.116819 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0eb0eeca-c60e-42b8-8df8-6560b563d9c0-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.246390 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0eb0eeca-c60e-42b8-8df8-6560b563d9c0","Type":"ContainerDied","Data":"9c9350b6f3914f073ac5750de7670d6ca7d945ec13dbfad157d4c9ba724288a8"} Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.247134 5003 scope.go:117] "RemoveContainer" containerID="64c858dda652466a27ba0ec415a77aa142ff2e4c87f934d78cf0eda08d5cc0db" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.247391 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.279178 5003 scope.go:117] "RemoveContainer" containerID="756f6499103740d47f9aabe90ee51e6d5537669adb0c428cee62f6f6a56bb89d" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.298922 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.307862 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.327627 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:10:28 crc kubenswrapper[5003]: E0104 12:10:28.328262 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb0eeca-c60e-42b8-8df8-6560b563d9c0" containerName="probe" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.328288 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb0eeca-c60e-42b8-8df8-6560b563d9c0" containerName="probe" Jan 04 12:10:28 crc kubenswrapper[5003]: E0104 12:10:28.328321 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb0eeca-c60e-42b8-8df8-6560b563d9c0" containerName="cinder-scheduler" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.328334 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb0eeca-c60e-42b8-8df8-6560b563d9c0" containerName="cinder-scheduler" Jan 04 12:10:28 crc kubenswrapper[5003]: E0104 12:10:28.328368 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b768613-0d91-4d90-a07f-7d2cdb33c3e3" containerName="init" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.328379 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b768613-0d91-4d90-a07f-7d2cdb33c3e3" containerName="init" Jan 04 12:10:28 crc kubenswrapper[5003]: E0104 12:10:28.328404 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b768613-0d91-4d90-a07f-7d2cdb33c3e3" containerName="dnsmasq-dns" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.328416 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b768613-0d91-4d90-a07f-7d2cdb33c3e3" containerName="dnsmasq-dns" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.328676 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b768613-0d91-4d90-a07f-7d2cdb33c3e3" containerName="dnsmasq-dns" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.328703 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eb0eeca-c60e-42b8-8df8-6560b563d9c0" containerName="cinder-scheduler" 
Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.328720 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eb0eeca-c60e-42b8-8df8-6560b563d9c0" containerName="probe" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.330108 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.333074 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.353117 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.423272 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7d5d490f-d968-4237-8a63-7f7d01b8708d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.423340 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-scripts\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.423551 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.423876 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.423925 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.423955 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h77s5\" (UniqueName: \"kubernetes.io/projected/7d5d490f-d968-4237-8a63-7f7d01b8708d-kube-api-access-h77s5\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.428445 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.430301 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.436817 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.436900 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.440521 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-pwmwd" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.463047 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.525544 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7d5d490f-d968-4237-8a63-7f7d01b8708d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.525609 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-scripts\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.525657 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xh6rd\" (UniqueName: \"kubernetes.io/projected/627b5a59-3da1-4130-92b1-94fcfea8efd4-kube-api-access-xh6rd\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.525658 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7d5d490f-d968-4237-8a63-7f7d01b8708d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.525678 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.525701 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.525854 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config-secret\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.526049 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.526110 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.526139 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h77s5\" (UniqueName: \"kubernetes.io/projected/7d5d490f-d968-4237-8a63-7f7d01b8708d-kube-api-access-h77s5\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.526174 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.531392 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.538185 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.560572 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-scripts\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.596373 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h77s5\" (UniqueName: \"kubernetes.io/projected/7d5d490f-d968-4237-8a63-7f7d01b8708d-kube-api-access-h77s5\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.596617 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.629146 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config-secret\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: 
I0104 12:10:28.629239 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.629304 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xh6rd\" (UniqueName: \"kubernetes.io/projected/627b5a59-3da1-4130-92b1-94fcfea8efd4-kube-api-access-xh6rd\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.629326 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.631559 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.634507 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config-secret\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.650151 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.650844 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-combined-ca-bundle\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.654446 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xh6rd\" (UniqueName: \"kubernetes.io/projected/627b5a59-3da1-4130-92b1-94fcfea8efd4-kube-api-access-xh6rd\") pod \"openstackclient\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.747075 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 04 12:10:28 crc kubenswrapper[5003]: I0104 12:10:28.834516 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eb0eeca-c60e-42b8-8df8-6560b563d9c0" path="/var/lib/kubelet/pods/0eb0eeca-c60e-42b8-8df8-6560b563d9c0/volumes" Jan 04 12:10:29 crc kubenswrapper[5003]: I0104 12:10:29.275078 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:10:29 crc kubenswrapper[5003]: W0104 12:10:29.305315 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d5d490f_d968_4237_8a63_7f7d01b8708d.slice/crio-58151d94a3364d675297e071fbac00a96c7a23ef674378752a5510281eb8f1d1 WatchSource:0}: Error finding container 58151d94a3364d675297e071fbac00a96c7a23ef674378752a5510281eb8f1d1: Status 404 returned error can't find the container with id 58151d94a3364d675297e071fbac00a96c7a23ef674378752a5510281eb8f1d1 Jan 04 12:10:29 crc kubenswrapper[5003]: I0104 12:10:29.388061 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 04 12:10:30 crc kubenswrapper[5003]: I0104 12:10:30.289230 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"627b5a59-3da1-4130-92b1-94fcfea8efd4","Type":"ContainerStarted","Data":"6fba32715bf7dcb18ef5bfd162fb792696aa3c135a0de18253f7b75f11967e76"} Jan 04 12:10:30 crc kubenswrapper[5003]: I0104 12:10:30.292762 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7d5d490f-d968-4237-8a63-7f7d01b8708d","Type":"ContainerStarted","Data":"5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695"} Jan 04 12:10:30 crc kubenswrapper[5003]: I0104 12:10:30.292818 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7d5d490f-d968-4237-8a63-7f7d01b8708d","Type":"ContainerStarted","Data":"58151d94a3364d675297e071fbac00a96c7a23ef674378752a5510281eb8f1d1"} Jan 04 12:10:30 crc kubenswrapper[5003]: I0104 12:10:30.718766 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.069002 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.129380 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-69bb8d8f84-sdsn4"] Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.130906 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-69bb8d8f84-sdsn4" podUID="1a785b98-dc58-469e-b1fd-4dba03f79d63" containerName="barbican-api-log" containerID="cri-o://6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59" gracePeriod=30 Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.132610 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-69bb8d8f84-sdsn4" podUID="1a785b98-dc58-469e-b1fd-4dba03f79d63" containerName="barbican-api" containerID="cri-o://e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837" gracePeriod=30 Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.367529 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.369501 5003 generic.go:334] "Generic (PLEG): container finished" podID="c6779a32-42c6-498d-9968-c98329f48aef" containerID="9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156" exitCode=0 Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.369574 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7848f7888d-tzm72" event={"ID":"c6779a32-42c6-498d-9968-c98329f48aef","Type":"ContainerDied","Data":"9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156"} Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.369613 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7848f7888d-tzm72" event={"ID":"c6779a32-42c6-498d-9968-c98329f48aef","Type":"ContainerDied","Data":"78f6805dca0f17b804a34365736d2026d1121c9402b49f8637a3d96f10c5abba"} Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.369631 5003 scope.go:117] "RemoveContainer" containerID="d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.375762 5003 generic.go:334] "Generic (PLEG): container finished" podID="1a785b98-dc58-469e-b1fd-4dba03f79d63" containerID="6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59" exitCode=143 Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.375845 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69bb8d8f84-sdsn4" event={"ID":"1a785b98-dc58-469e-b1fd-4dba03f79d63","Type":"ContainerDied","Data":"6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59"} Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.380484 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7d5d490f-d968-4237-8a63-7f7d01b8708d","Type":"ContainerStarted","Data":"08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c"} Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.431706 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.431685103 podStartE2EDuration="3.431685103s" podCreationTimestamp="2026-01-04 12:10:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:31.413914239 +0000 UTC m=+1346.886944090" watchObservedRunningTime="2026-01-04 12:10:31.431685103 +0000 UTC m=+1346.904714944" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.437343 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-config\") pod \"c6779a32-42c6-498d-9968-c98329f48aef\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.437440 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-ovndb-tls-certs\") pod \"c6779a32-42c6-498d-9968-c98329f48aef\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.437528 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc6f9\" (UniqueName: \"kubernetes.io/projected/c6779a32-42c6-498d-9968-c98329f48aef-kube-api-access-wc6f9\") pod 
\"c6779a32-42c6-498d-9968-c98329f48aef\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.437594 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-httpd-config\") pod \"c6779a32-42c6-498d-9968-c98329f48aef\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.437670 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-combined-ca-bundle\") pod \"c6779a32-42c6-498d-9968-c98329f48aef\" (UID: \"c6779a32-42c6-498d-9968-c98329f48aef\") " Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.441742 5003 scope.go:117] "RemoveContainer" containerID="9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.449590 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6779a32-42c6-498d-9968-c98329f48aef-kube-api-access-wc6f9" (OuterVolumeSpecName: "kube-api-access-wc6f9") pod "c6779a32-42c6-498d-9968-c98329f48aef" (UID: "c6779a32-42c6-498d-9968-c98329f48aef"). InnerVolumeSpecName "kube-api-access-wc6f9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.450789 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "c6779a32-42c6-498d-9968-c98329f48aef" (UID: "c6779a32-42c6-498d-9968-c98329f48aef"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.517601 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6779a32-42c6-498d-9968-c98329f48aef" (UID: "c6779a32-42c6-498d-9968-c98329f48aef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.549342 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc6f9\" (UniqueName: \"kubernetes.io/projected/c6779a32-42c6-498d-9968-c98329f48aef-kube-api-access-wc6f9\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.549494 5003 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.549572 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.610638 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "c6779a32-42c6-498d-9968-c98329f48aef" (UID: "c6779a32-42c6-498d-9968-c98329f48aef"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.628566 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-config" (OuterVolumeSpecName: "config") pod "c6779a32-42c6-498d-9968-c98329f48aef" (UID: "c6779a32-42c6-498d-9968-c98329f48aef"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.642460 5003 scope.go:117] "RemoveContainer" containerID="d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d" Jan 04 12:10:31 crc kubenswrapper[5003]: E0104 12:10:31.647211 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d\": container with ID starting with d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d not found: ID does not exist" containerID="d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.647277 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d"} err="failed to get container status \"d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d\": rpc error: code = NotFound desc = could not find container \"d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d\": container with ID starting with d40d337b1299b5bdb3678382921b077373f9e1da97c340de5934fbaff16d595d not found: ID does not exist" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.647310 5003 scope.go:117] "RemoveContainer" containerID="9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156" Jan 04 12:10:31 crc kubenswrapper[5003]: E0104 12:10:31.650889 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156\": container with ID starting with 9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156 not found: ID does not exist" containerID="9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.650931 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156"} err="failed to get container status \"9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156\": rpc error: code = NotFound desc = could not find container \"9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156\": container with ID starting with 9c35ddc623b936059247b0c706034f980ad414046e6deaeb3798d3d273938156 not found: ID does not exist" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.652961 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:31 crc kubenswrapper[5003]: I0104 12:10:31.653003 5003 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6779a32-42c6-498d-9968-c98329f48aef-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:32 crc kubenswrapper[5003]: I0104 12:10:32.396761 5003 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openstack/neutron-7848f7888d-tzm72" Jan 04 12:10:32 crc kubenswrapper[5003]: I0104 12:10:32.446107 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7848f7888d-tzm72"] Jan 04 12:10:32 crc kubenswrapper[5003]: I0104 12:10:32.456616 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7848f7888d-tzm72"] Jan 04 12:10:32 crc kubenswrapper[5003]: I0104 12:10:32.846785 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6779a32-42c6-498d-9968-c98329f48aef" path="/var/lib/kubelet/pods/c6779a32-42c6-498d-9968-c98329f48aef/volumes" Jan 04 12:10:33 crc kubenswrapper[5003]: I0104 12:10:33.609122 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Jan 04 12:10:33 crc kubenswrapper[5003]: I0104 12:10:33.877719 5003 patch_prober.go:28] interesting pod/authentication-operator-69f744f599-n7vrk container/authentication-operator namespace/openshift-authentication-operator: Liveness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 04 12:10:33 crc kubenswrapper[5003]: I0104 12:10:33.877799 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-authentication-operator/authentication-operator-69f744f599-n7vrk" podUID="7044cdc7-b7c2-454e-9460-8f6b783f85eb" containerName="authentication-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 04 12:10:33 crc kubenswrapper[5003]: I0104 12:10:33.902099 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-cell1-galera-0" podUID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" containerName="galera" probeResult="failure" output="command timed out" Jan 04 12:10:33 crc kubenswrapper[5003]: I0104 12:10:33.904104 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Jan 04 12:10:34 crc kubenswrapper[5003]: I0104 12:10:34.873066 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-69bb8d8f84-sdsn4" Jan 04 12:10:34 crc kubenswrapper[5003]: I0104 12:10:34.959651 5003 generic.go:334] "Generic (PLEG): container finished" podID="1a785b98-dc58-469e-b1fd-4dba03f79d63" containerID="e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837" exitCode=0 Jan 04 12:10:34 crc kubenswrapper[5003]: I0104 12:10:34.959723 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-69bb8d8f84-sdsn4" Jan 04 12:10:34 crc kubenswrapper[5003]: I0104 12:10:34.959712 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69bb8d8f84-sdsn4" event={"ID":"1a785b98-dc58-469e-b1fd-4dba03f79d63","Type":"ContainerDied","Data":"e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837"} Jan 04 12:10:34 crc kubenswrapper[5003]: I0104 12:10:34.959855 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-69bb8d8f84-sdsn4" event={"ID":"1a785b98-dc58-469e-b1fd-4dba03f79d63","Type":"ContainerDied","Data":"0469edb717d134607e05a2e5ebaf68614da6112ae9c5a65082ff579ed0d27c1d"} Jan 04 12:10:34 crc kubenswrapper[5003]: I0104 12:10:34.959875 5003 scope.go:117] "RemoveContainer" containerID="e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837" Jan 04 12:10:34 crc kubenswrapper[5003]: I0104 12:10:34.998440 5003 scope.go:117] "RemoveContainer" containerID="6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.018173 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data\") pod \"1a785b98-dc58-469e-b1fd-4dba03f79d63\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.018267 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data-custom\") pod \"1a785b98-dc58-469e-b1fd-4dba03f79d63\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.018458 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-combined-ca-bundle\") pod \"1a785b98-dc58-469e-b1fd-4dba03f79d63\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.018493 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a785b98-dc58-469e-b1fd-4dba03f79d63-logs\") pod \"1a785b98-dc58-469e-b1fd-4dba03f79d63\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.018562 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qvht\" (UniqueName: \"kubernetes.io/projected/1a785b98-dc58-469e-b1fd-4dba03f79d63-kube-api-access-8qvht\") pod \"1a785b98-dc58-469e-b1fd-4dba03f79d63\" (UID: \"1a785b98-dc58-469e-b1fd-4dba03f79d63\") " Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.019684 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a785b98-dc58-469e-b1fd-4dba03f79d63-logs" (OuterVolumeSpecName: "logs") pod "1a785b98-dc58-469e-b1fd-4dba03f79d63" (UID: "1a785b98-dc58-469e-b1fd-4dba03f79d63"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.026403 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a785b98-dc58-469e-b1fd-4dba03f79d63-kube-api-access-8qvht" (OuterVolumeSpecName: "kube-api-access-8qvht") pod "1a785b98-dc58-469e-b1fd-4dba03f79d63" (UID: "1a785b98-dc58-469e-b1fd-4dba03f79d63"). InnerVolumeSpecName "kube-api-access-8qvht". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.027127 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1a785b98-dc58-469e-b1fd-4dba03f79d63" (UID: "1a785b98-dc58-469e-b1fd-4dba03f79d63"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.027845 5003 scope.go:117] "RemoveContainer" containerID="e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837" Jan 04 12:10:35 crc kubenswrapper[5003]: E0104 12:10:35.029561 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837\": container with ID starting with e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837 not found: ID does not exist" containerID="e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.029617 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837"} err="failed to get container status \"e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837\": rpc error: code = NotFound desc = could not find container \"e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837\": container with ID starting with e7c254de6716144cd491d99149d98905d172d0db9c6882e1846b5c67a1f49837 not found: ID does not exist" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.029652 5003 scope.go:117] "RemoveContainer" containerID="6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59" Jan 04 12:10:35 crc kubenswrapper[5003]: E0104 12:10:35.030201 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59\": container with ID starting with 6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59 not found: ID does not exist" containerID="6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.030223 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59"} err="failed to get container status \"6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59\": rpc error: code = NotFound desc = could not find container \"6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59\": container with ID starting with 6029bc8416ed350837a9ee4bb69bd0465579a5bf564ff938313ee525de4f5c59 not found: ID does not exist" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.063098 5003 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a785b98-dc58-469e-b1fd-4dba03f79d63" (UID: "1a785b98-dc58-469e-b1fd-4dba03f79d63"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.094230 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data" (OuterVolumeSpecName: "config-data") pod "1a785b98-dc58-469e-b1fd-4dba03f79d63" (UID: "1a785b98-dc58-469e-b1fd-4dba03f79d63"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.120337 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.120499 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a785b98-dc58-469e-b1fd-4dba03f79d63-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.120584 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qvht\" (UniqueName: \"kubernetes.io/projected/1a785b98-dc58-469e-b1fd-4dba03f79d63-kube-api-access-8qvht\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.120643 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.120696 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a785b98-dc58-469e-b1fd-4dba03f79d63-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.292740 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-69bb8d8f84-sdsn4"] Jan 04 12:10:35 crc kubenswrapper[5003]: I0104 12:10:35.300497 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-69bb8d8f84-sdsn4"] Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.771204 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-6885ccbc9-jntqm"] Jan 04 12:10:36 crc kubenswrapper[5003]: E0104 12:10:36.772056 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a785b98-dc58-469e-b1fd-4dba03f79d63" containerName="barbican-api-log" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.772071 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a785b98-dc58-469e-b1fd-4dba03f79d63" containerName="barbican-api-log" Jan 04 12:10:36 crc kubenswrapper[5003]: E0104 12:10:36.772097 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a785b98-dc58-469e-b1fd-4dba03f79d63" containerName="barbican-api" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.772104 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a785b98-dc58-469e-b1fd-4dba03f79d63" containerName="barbican-api" Jan 04 12:10:36 crc kubenswrapper[5003]: E0104 12:10:36.772115 5003 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c6779a32-42c6-498d-9968-c98329f48aef" containerName="neutron-api" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.772122 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6779a32-42c6-498d-9968-c98329f48aef" containerName="neutron-api" Jan 04 12:10:36 crc kubenswrapper[5003]: E0104 12:10:36.772144 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6779a32-42c6-498d-9968-c98329f48aef" containerName="neutron-httpd" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.772150 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6779a32-42c6-498d-9968-c98329f48aef" containerName="neutron-httpd" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.772333 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a785b98-dc58-469e-b1fd-4dba03f79d63" containerName="barbican-api-log" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.772343 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6779a32-42c6-498d-9968-c98329f48aef" containerName="neutron-httpd" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.772356 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6779a32-42c6-498d-9968-c98329f48aef" containerName="neutron-api" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.772373 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a785b98-dc58-469e-b1fd-4dba03f79d63" containerName="barbican-api" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.774992 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.777754 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.778172 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.778380 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.796597 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6885ccbc9-jntqm"] Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.825780 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a785b98-dc58-469e-b1fd-4dba03f79d63" path="/var/lib/kubelet/pods/1a785b98-dc58-469e-b1fd-4dba03f79d63/volumes" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.863003 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-internal-tls-certs\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.863082 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-combined-ca-bundle\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.864330 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-config-data\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.864362 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-public-tls-certs\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.864394 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-etc-swift\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.864439 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-run-httpd\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.864460 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl4jq\" (UniqueName: \"kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-kube-api-access-wl4jq\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.864482 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-log-httpd\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.966863 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-config-data\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.966935 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-public-tls-certs\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.966979 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-etc-swift\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.967029 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-run-httpd\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.967065 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl4jq\" (UniqueName: \"kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-kube-api-access-wl4jq\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.967101 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-log-httpd\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.967187 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-internal-tls-certs\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.967256 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-combined-ca-bundle\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.971607 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-run-httpd\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.971608 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-log-httpd\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.976002 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-public-tls-certs\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.977646 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-etc-swift\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.978204 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-config-data\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: 
\"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.980757 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-combined-ca-bundle\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.984950 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-internal-tls-certs\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:36 crc kubenswrapper[5003]: I0104 12:10:36.994045 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl4jq\" (UniqueName: \"kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-kube-api-access-wl4jq\") pod \"swift-proxy-6885ccbc9-jntqm\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:37 crc kubenswrapper[5003]: I0104 12:10:37.093433 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:38 crc kubenswrapper[5003]: I0104 12:10:38.081708 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 04 12:10:38 crc kubenswrapper[5003]: I0104 12:10:38.274597 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:38 crc kubenswrapper[5003]: I0104 12:10:38.926502 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Jan 04 12:10:39 crc kubenswrapper[5003]: I0104 12:10:39.041627 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="ceilometer-central-agent" containerID="cri-o://9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85" gracePeriod=30 Jan 04 12:10:39 crc kubenswrapper[5003]: I0104 12:10:39.042650 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="sg-core" containerID="cri-o://8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2" gracePeriod=30 Jan 04 12:10:39 crc kubenswrapper[5003]: I0104 12:10:39.042721 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="proxy-httpd" containerID="cri-o://62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065" gracePeriod=30 Jan 04 12:10:39 crc kubenswrapper[5003]: I0104 12:10:39.042734 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="ceilometer-notification-agent" containerID="cri-o://713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542" gracePeriod=30 Jan 04 12:10:39 crc kubenswrapper[5003]: I0104 12:10:39.418547 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:10:39 crc kubenswrapper[5003]: I0104 12:10:39.418616 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:10:40 crc kubenswrapper[5003]: I0104 12:10:40.077993 5003 generic.go:334] "Generic (PLEG): container finished" podID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerID="62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065" exitCode=0 Jan 04 12:10:40 crc kubenswrapper[5003]: I0104 12:10:40.078418 5003 generic.go:334] "Generic (PLEG): container finished" podID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerID="8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2" exitCode=2 Jan 04 12:10:40 crc kubenswrapper[5003]: I0104 12:10:40.078427 5003 generic.go:334] "Generic (PLEG): container finished" podID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerID="9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85" exitCode=0 Jan 04 12:10:40 crc kubenswrapper[5003]: I0104 12:10:40.078073 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"235041d8-b61c-4f18-b352-c07ae69e5c49","Type":"ContainerDied","Data":"62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065"} Jan 04 12:10:40 crc kubenswrapper[5003]: I0104 12:10:40.078482 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"235041d8-b61c-4f18-b352-c07ae69e5c49","Type":"ContainerDied","Data":"8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2"} Jan 04 12:10:40 crc kubenswrapper[5003]: I0104 12:10:40.078503 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"235041d8-b61c-4f18-b352-c07ae69e5c49","Type":"ContainerDied","Data":"9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85"} Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.598597 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-xw5xf"] Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.602587 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-xw5xf" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.621240 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-xw5xf"] Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.685220 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-nvbp2"] Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.686616 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-nvbp2" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.725219 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8100fc92-5887-4c6e-b489-6eabd0f88615-operator-scripts\") pod \"nova-cell0-db-create-nvbp2\" (UID: \"8100fc92-5887-4c6e-b489-6eabd0f88615\") " pod="openstack/nova-cell0-db-create-nvbp2" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.725358 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4t4l\" (UniqueName: \"kubernetes.io/projected/8100fc92-5887-4c6e-b489-6eabd0f88615-kube-api-access-r4t4l\") pod \"nova-cell0-db-create-nvbp2\" (UID: \"8100fc92-5887-4c6e-b489-6eabd0f88615\") " pod="openstack/nova-cell0-db-create-nvbp2" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.725455 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8sv8\" (UniqueName: \"kubernetes.io/projected/a58094ef-de28-4ae5-84a9-b74684aca52e-kube-api-access-w8sv8\") pod \"nova-api-db-create-xw5xf\" (UID: \"a58094ef-de28-4ae5-84a9-b74684aca52e\") " pod="openstack/nova-api-db-create-xw5xf" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.725488 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a58094ef-de28-4ae5-84a9-b74684aca52e-operator-scripts\") pod \"nova-api-db-create-xw5xf\" (UID: \"a58094ef-de28-4ae5-84a9-b74684aca52e\") " pod="openstack/nova-api-db-create-xw5xf" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.729350 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-nvbp2"] Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.773164 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.815694 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-5f14-account-create-update-zqtvl"] Jan 04 12:10:43 crc kubenswrapper[5003]: E0104 12:10:43.816200 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="ceilometer-central-agent" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.816221 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="ceilometer-central-agent" Jan 04 12:10:43 crc kubenswrapper[5003]: E0104 12:10:43.816252 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="sg-core" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.816259 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="sg-core" Jan 04 12:10:43 crc kubenswrapper[5003]: E0104 12:10:43.816274 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="proxy-httpd" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.816284 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="proxy-httpd" Jan 04 12:10:43 crc kubenswrapper[5003]: E0104 12:10:43.816304 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="ceilometer-notification-agent" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.816313 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="ceilometer-notification-agent" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.816512 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="ceilometer-central-agent" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.816535 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="proxy-httpd" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.816558 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="sg-core" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.816570 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerName="ceilometer-notification-agent" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.817374 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5f14-account-create-update-zqtvl" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.820967 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.827477 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-log-httpd\") pod \"235041d8-b61c-4f18-b352-c07ae69e5c49\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.827536 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-run-httpd\") pod \"235041d8-b61c-4f18-b352-c07ae69e5c49\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.827571 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-combined-ca-bundle\") pod \"235041d8-b61c-4f18-b352-c07ae69e5c49\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.827605 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfd57\" (UniqueName: \"kubernetes.io/projected/235041d8-b61c-4f18-b352-c07ae69e5c49-kube-api-access-rfd57\") pod \"235041d8-b61c-4f18-b352-c07ae69e5c49\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.827654 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-sg-core-conf-yaml\") pod \"235041d8-b61c-4f18-b352-c07ae69e5c49\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.827710 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-scripts\") pod \"235041d8-b61c-4f18-b352-c07ae69e5c49\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.827736 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-config-data\") pod \"235041d8-b61c-4f18-b352-c07ae69e5c49\" (UID: \"235041d8-b61c-4f18-b352-c07ae69e5c49\") " Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.827909 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncdwq\" (UniqueName: \"kubernetes.io/projected/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-kube-api-access-ncdwq\") pod \"nova-api-5f14-account-create-update-zqtvl\" (UID: \"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98\") " pod="openstack/nova-api-5f14-account-create-update-zqtvl" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.828005 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8sv8\" (UniqueName: \"kubernetes.io/projected/a58094ef-de28-4ae5-84a9-b74684aca52e-kube-api-access-w8sv8\") pod \"nova-api-db-create-xw5xf\" (UID: \"a58094ef-de28-4ae5-84a9-b74684aca52e\") " pod="openstack/nova-api-db-create-xw5xf" Jan 04 
12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.828055 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a58094ef-de28-4ae5-84a9-b74684aca52e-operator-scripts\") pod \"nova-api-db-create-xw5xf\" (UID: \"a58094ef-de28-4ae5-84a9-b74684aca52e\") " pod="openstack/nova-api-db-create-xw5xf" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.828107 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8100fc92-5887-4c6e-b489-6eabd0f88615-operator-scripts\") pod \"nova-cell0-db-create-nvbp2\" (UID: \"8100fc92-5887-4c6e-b489-6eabd0f88615\") " pod="openstack/nova-cell0-db-create-nvbp2" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.828134 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-operator-scripts\") pod \"nova-api-5f14-account-create-update-zqtvl\" (UID: \"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98\") " pod="openstack/nova-api-5f14-account-create-update-zqtvl" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.828247 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4t4l\" (UniqueName: \"kubernetes.io/projected/8100fc92-5887-4c6e-b489-6eabd0f88615-kube-api-access-r4t4l\") pod \"nova-cell0-db-create-nvbp2\" (UID: \"8100fc92-5887-4c6e-b489-6eabd0f88615\") " pod="openstack/nova-cell0-db-create-nvbp2" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.828331 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "235041d8-b61c-4f18-b352-c07ae69e5c49" (UID: "235041d8-b61c-4f18-b352-c07ae69e5c49"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.829729 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a58094ef-de28-4ae5-84a9-b74684aca52e-operator-scripts\") pod \"nova-api-db-create-xw5xf\" (UID: \"a58094ef-de28-4ae5-84a9-b74684aca52e\") " pod="openstack/nova-api-db-create-xw5xf" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.831164 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8100fc92-5887-4c6e-b489-6eabd0f88615-operator-scripts\") pod \"nova-cell0-db-create-nvbp2\" (UID: \"8100fc92-5887-4c6e-b489-6eabd0f88615\") " pod="openstack/nova-cell0-db-create-nvbp2" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.833177 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-scripts" (OuterVolumeSpecName: "scripts") pod "235041d8-b61c-4f18-b352-c07ae69e5c49" (UID: "235041d8-b61c-4f18-b352-c07ae69e5c49"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.842481 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "235041d8-b61c-4f18-b352-c07ae69e5c49" (UID: "235041d8-b61c-4f18-b352-c07ae69e5c49"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.844150 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5f14-account-create-update-zqtvl"] Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.849981 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/235041d8-b61c-4f18-b352-c07ae69e5c49-kube-api-access-rfd57" (OuterVolumeSpecName: "kube-api-access-rfd57") pod "235041d8-b61c-4f18-b352-c07ae69e5c49" (UID: "235041d8-b61c-4f18-b352-c07ae69e5c49"). InnerVolumeSpecName "kube-api-access-rfd57". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.859934 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8sv8\" (UniqueName: \"kubernetes.io/projected/a58094ef-de28-4ae5-84a9-b74684aca52e-kube-api-access-w8sv8\") pod \"nova-api-db-create-xw5xf\" (UID: \"a58094ef-de28-4ae5-84a9-b74684aca52e\") " pod="openstack/nova-api-db-create-xw5xf" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.868651 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4t4l\" (UniqueName: \"kubernetes.io/projected/8100fc92-5887-4c6e-b489-6eabd0f88615-kube-api-access-r4t4l\") pod \"nova-cell0-db-create-nvbp2\" (UID: \"8100fc92-5887-4c6e-b489-6eabd0f88615\") " pod="openstack/nova-cell0-db-create-nvbp2" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.911090 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "235041d8-b61c-4f18-b352-c07ae69e5c49" (UID: "235041d8-b61c-4f18-b352-c07ae69e5c49"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.919966 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-v7b9n"] Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.921473 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-v7b9n" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.930025 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8ff01a27-abff-42af-a7dd-f63b6ade45bf-operator-scripts\") pod \"nova-cell1-db-create-v7b9n\" (UID: \"8ff01a27-abff-42af-a7dd-f63b6ade45bf\") " pod="openstack/nova-cell1-db-create-v7b9n" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.930084 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-operator-scripts\") pod \"nova-api-5f14-account-create-update-zqtvl\" (UID: \"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98\") " pod="openstack/nova-api-5f14-account-create-update-zqtvl" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.930136 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9qjf\" (UniqueName: \"kubernetes.io/projected/8ff01a27-abff-42af-a7dd-f63b6ade45bf-kube-api-access-n9qjf\") pod \"nova-cell1-db-create-v7b9n\" (UID: \"8ff01a27-abff-42af-a7dd-f63b6ade45bf\") " pod="openstack/nova-cell1-db-create-v7b9n" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.930233 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncdwq\" (UniqueName: \"kubernetes.io/projected/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-kube-api-access-ncdwq\") pod \"nova-api-5f14-account-create-update-zqtvl\" (UID: \"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98\") " pod="openstack/nova-api-5f14-account-create-update-zqtvl" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.930312 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.930325 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.930336 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/235041d8-b61c-4f18-b352-c07ae69e5c49-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.930347 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfd57\" (UniqueName: \"kubernetes.io/projected/235041d8-b61c-4f18-b352-c07ae69e5c49-kube-api-access-rfd57\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.930357 5003 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.930797 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-operator-scripts\") pod \"nova-api-5f14-account-create-update-zqtvl\" (UID: \"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98\") " pod="openstack/nova-api-5f14-account-create-update-zqtvl" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.936672 5003 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-xw5xf" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.946365 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-v7b9n"] Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.953203 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncdwq\" (UniqueName: \"kubernetes.io/projected/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-kube-api-access-ncdwq\") pod \"nova-api-5f14-account-create-update-zqtvl\" (UID: \"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98\") " pod="openstack/nova-api-5f14-account-create-update-zqtvl" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.974812 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "235041d8-b61c-4f18-b352-c07ae69e5c49" (UID: "235041d8-b61c-4f18-b352-c07ae69e5c49"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:43 crc kubenswrapper[5003]: I0104 12:10:43.993642 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6885ccbc9-jntqm"] Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.023930 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-ccc2-account-create-update-ftb5c"] Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.026500 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.030582 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.032125 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8ff01a27-abff-42af-a7dd-f63b6ade45bf-operator-scripts\") pod \"nova-cell1-db-create-v7b9n\" (UID: \"8ff01a27-abff-42af-a7dd-f63b6ade45bf\") " pod="openstack/nova-cell1-db-create-v7b9n" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.032223 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9qjf\" (UniqueName: \"kubernetes.io/projected/8ff01a27-abff-42af-a7dd-f63b6ade45bf-kube-api-access-n9qjf\") pod \"nova-cell1-db-create-v7b9n\" (UID: \"8ff01a27-abff-42af-a7dd-f63b6ade45bf\") " pod="openstack/nova-cell1-db-create-v7b9n" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.032656 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.033697 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8ff01a27-abff-42af-a7dd-f63b6ade45bf-operator-scripts\") pod \"nova-cell1-db-create-v7b9n\" (UID: \"8ff01a27-abff-42af-a7dd-f63b6ade45bf\") " pod="openstack/nova-cell1-db-create-v7b9n" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.035307 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ccc2-account-create-update-ftb5c"] Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.036927 5003 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-config-data" (OuterVolumeSpecName: "config-data") pod "235041d8-b61c-4f18-b352-c07ae69e5c49" (UID: "235041d8-b61c-4f18-b352-c07ae69e5c49"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.040664 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nvbp2" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.051962 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9qjf\" (UniqueName: \"kubernetes.io/projected/8ff01a27-abff-42af-a7dd-f63b6ade45bf-kube-api-access-n9qjf\") pod \"nova-cell1-db-create-v7b9n\" (UID: \"8ff01a27-abff-42af-a7dd-f63b6ade45bf\") " pod="openstack/nova-cell1-db-create-v7b9n" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.127447 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"627b5a59-3da1-4130-92b1-94fcfea8efd4","Type":"ContainerStarted","Data":"72b3f2ee266e67b1cc81df1e410f3bd7f96dcf54ecba5f2d66edb9857899172e"} Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.137761 5003 generic.go:334] "Generic (PLEG): container finished" podID="235041d8-b61c-4f18-b352-c07ae69e5c49" containerID="713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542" exitCode=0 Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.137829 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"235041d8-b61c-4f18-b352-c07ae69e5c49","Type":"ContainerDied","Data":"713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542"} Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.137856 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"235041d8-b61c-4f18-b352-c07ae69e5c49","Type":"ContainerDied","Data":"00a243e4e8ed59fcbadfe7e64a3bbccfbe59c2ac2733550d612bc3f828eabc08"} Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.137873 5003 scope.go:117] "RemoveContainer" containerID="62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.137999 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.145180 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs625\" (UniqueName: \"kubernetes.io/projected/54218530-45fd-460c-b755-49b5f28b0f02-kube-api-access-hs625\") pod \"nova-cell0-ccc2-account-create-update-ftb5c\" (UID: \"54218530-45fd-460c-b755-49b5f28b0f02\") " pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.145272 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54218530-45fd-460c-b755-49b5f28b0f02-operator-scripts\") pod \"nova-cell0-ccc2-account-create-update-ftb5c\" (UID: \"54218530-45fd-460c-b755-49b5f28b0f02\") " pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.145696 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/235041d8-b61c-4f18-b352-c07ae69e5c49-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.156355 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f14-account-create-update-zqtvl" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.159297 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.248281645 podStartE2EDuration="16.159270753s" podCreationTimestamp="2026-01-04 12:10:28 +0000 UTC" firstStartedPulling="2026-01-04 12:10:29.393845154 +0000 UTC m=+1344.866874995" lastFinishedPulling="2026-01-04 12:10:43.304834262 +0000 UTC m=+1358.777864103" observedRunningTime="2026-01-04 12:10:44.148356258 +0000 UTC m=+1359.621386119" watchObservedRunningTime="2026-01-04 12:10:44.159270753 +0000 UTC m=+1359.632300594" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.163318 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6885ccbc9-jntqm" event={"ID":"afad0966-8385-444b-9eed-8418c0a49b2a","Type":"ContainerStarted","Data":"ea536b2079f8710d9f7de8266986bc2d261d9e8bd785f2f18c978054d29b2b31"} Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.212373 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.244215 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.248314 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs625\" (UniqueName: \"kubernetes.io/projected/54218530-45fd-460c-b755-49b5f28b0f02-kube-api-access-hs625\") pod \"nova-cell0-ccc2-account-create-update-ftb5c\" (UID: \"54218530-45fd-460c-b755-49b5f28b0f02\") " pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.248422 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54218530-45fd-460c-b755-49b5f28b0f02-operator-scripts\") pod \"nova-cell0-ccc2-account-create-update-ftb5c\" (UID: \"54218530-45fd-460c-b755-49b5f28b0f02\") " pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.249341 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54218530-45fd-460c-b755-49b5f28b0f02-operator-scripts\") pod \"nova-cell0-ccc2-account-create-update-ftb5c\" (UID: \"54218530-45fd-460c-b755-49b5f28b0f02\") " pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.255860 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-v7b9n" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.282165 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.298030 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs625\" (UniqueName: \"kubernetes.io/projected/54218530-45fd-460c-b755-49b5f28b0f02-kube-api-access-hs625\") pod \"nova-cell0-ccc2-account-create-update-ftb5c\" (UID: \"54218530-45fd-460c-b755-49b5f28b0f02\") " pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.299161 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.301566 5003 scope.go:117] "RemoveContainer" containerID="8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.303564 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.303979 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.324392 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.364167 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.366479 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-8552-account-create-update-69dtz"] Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.367967 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-8552-account-create-update-69dtz" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.385667 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.396075 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8552-account-create-update-69dtz"] Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.451487 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-log-httpd\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.451527 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-config-data\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.451582 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.451613 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-scripts\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.451667 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-run-httpd\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.451724 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd4sf\" (UniqueName: \"kubernetes.io/projected/3939a53e-a630-4d14-b88a-46820430b3f1-kube-api-access-qd4sf\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.451753 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.489050 5003 scope.go:117] "RemoveContainer" containerID="713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.530498 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-xw5xf"] Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.553711 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-log-httpd\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.553766 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-config-data\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.553827 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.553859 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-scripts\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.553890 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/209bf3b8-aa8b-46cb-b41f-1737c40c7522-operator-scripts\") pod \"nova-cell1-8552-account-create-update-69dtz\" (UID: \"209bf3b8-aa8b-46cb-b41f-1737c40c7522\") " pod="openstack/nova-cell1-8552-account-create-update-69dtz" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.553940 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-run-httpd\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.553964 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27ncx\" (UniqueName: \"kubernetes.io/projected/209bf3b8-aa8b-46cb-b41f-1737c40c7522-kube-api-access-27ncx\") pod \"nova-cell1-8552-account-create-update-69dtz\" (UID: \"209bf3b8-aa8b-46cb-b41f-1737c40c7522\") " pod="openstack/nova-cell1-8552-account-create-update-69dtz" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.554050 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd4sf\" (UniqueName: \"kubernetes.io/projected/3939a53e-a630-4d14-b88a-46820430b3f1-kube-api-access-qd4sf\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.554082 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.556601 5003 scope.go:117] "RemoveContainer" containerID="9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.557826 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-run-httpd\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.557956 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-log-httpd\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.563555 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-config-data\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.566554 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-scripts\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.568353 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.568589 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: W0104 12:10:44.573268 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda58094ef_de28_4ae5_84a9_b74684aca52e.slice/crio-4419b157332b336edead117098cb7385dede0c92be181abac5484cf5adb59467 WatchSource:0}: Error finding container 4419b157332b336edead117098cb7385dede0c92be181abac5484cf5adb59467: Status 404 returned error can't find the container with id 4419b157332b336edead117098cb7385dede0c92be181abac5484cf5adb59467 Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.584307 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd4sf\" (UniqueName: \"kubernetes.io/projected/3939a53e-a630-4d14-b88a-46820430b3f1-kube-api-access-qd4sf\") pod \"ceilometer-0\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.607411 5003 scope.go:117] "RemoveContainer" containerID="62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065" Jan 04 12:10:44 crc kubenswrapper[5003]: E0104 12:10:44.608101 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065\": container with ID starting with 62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065 not found: ID does not exist" containerID="62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.608189 5003 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065"} err="failed to get container status \"62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065\": rpc error: code = NotFound desc = could not find container \"62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065\": container with ID starting with 62db70e3669f5fcb779c3b4b14cfdd8579a7b22e6e378d1a2e0afbf72dc62065 not found: ID does not exist" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.608237 5003 scope.go:117] "RemoveContainer" containerID="8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2" Jan 04 12:10:44 crc kubenswrapper[5003]: E0104 12:10:44.608645 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2\": container with ID starting with 8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2 not found: ID does not exist" containerID="8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.608698 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2"} err="failed to get container status \"8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2\": rpc error: code = NotFound desc = could not find container \"8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2\": container with ID starting with 8f68d0e8c69ee6303a4ae0263486d6f240c009e3360e4375ca784cbc05abb1f2 not found: ID does not exist" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.608730 5003 scope.go:117] "RemoveContainer" containerID="713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542" Jan 04 12:10:44 crc kubenswrapper[5003]: E0104 12:10:44.609035 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542\": container with ID starting with 713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542 not found: ID does not exist" containerID="713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.609069 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542"} err="failed to get container status \"713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542\": rpc error: code = NotFound desc = could not find container \"713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542\": container with ID starting with 713301b7ac7149d0d418c7d8c3bcfff41bc88b397b49860b2763db0f75492542 not found: ID does not exist" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.609087 5003 scope.go:117] "RemoveContainer" containerID="9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85" Jan 04 12:10:44 crc kubenswrapper[5003]: E0104 12:10:44.609339 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85\": container with ID starting with 9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85 not found: ID does not exist" 
containerID="9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.609366 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85"} err="failed to get container status \"9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85\": rpc error: code = NotFound desc = could not find container \"9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85\": container with ID starting with 9c193888e3c481aed219acf655f13d78e575780d3531aedb6e32f5b693f84b85 not found: ID does not exist" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.661562 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/209bf3b8-aa8b-46cb-b41f-1737c40c7522-operator-scripts\") pod \"nova-cell1-8552-account-create-update-69dtz\" (UID: \"209bf3b8-aa8b-46cb-b41f-1737c40c7522\") " pod="openstack/nova-cell1-8552-account-create-update-69dtz" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.661756 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27ncx\" (UniqueName: \"kubernetes.io/projected/209bf3b8-aa8b-46cb-b41f-1737c40c7522-kube-api-access-27ncx\") pod \"nova-cell1-8552-account-create-update-69dtz\" (UID: \"209bf3b8-aa8b-46cb-b41f-1737c40c7522\") " pod="openstack/nova-cell1-8552-account-create-update-69dtz" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.665034 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/209bf3b8-aa8b-46cb-b41f-1737c40c7522-operator-scripts\") pod \"nova-cell1-8552-account-create-update-69dtz\" (UID: \"209bf3b8-aa8b-46cb-b41f-1737c40c7522\") " pod="openstack/nova-cell1-8552-account-create-update-69dtz" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.685493 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27ncx\" (UniqueName: \"kubernetes.io/projected/209bf3b8-aa8b-46cb-b41f-1737c40c7522-kube-api-access-27ncx\") pod \"nova-cell1-8552-account-create-update-69dtz\" (UID: \"209bf3b8-aa8b-46cb-b41f-1737c40c7522\") " pod="openstack/nova-cell1-8552-account-create-update-69dtz" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.789653 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.818783 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-8552-account-create-update-69dtz" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.868182 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="235041d8-b61c-4f18-b352-c07ae69e5c49" path="/var/lib/kubelet/pods/235041d8-b61c-4f18-b352-c07ae69e5c49/volumes" Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.870137 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-nvbp2"] Jan 04 12:10:44 crc kubenswrapper[5003]: I0104 12:10:44.985250 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-v7b9n"] Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.073900 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5f14-account-create-update-zqtvl"] Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.179696 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-ccc2-account-create-update-ftb5c"] Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.242739 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f14-account-create-update-zqtvl" event={"ID":"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98","Type":"ContainerStarted","Data":"dd989c698aa03c1080fb189f1ad32e9ca3fb0e0eb001edabcef6607600fccb9e"} Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.246814 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-v7b9n" event={"ID":"8ff01a27-abff-42af-a7dd-f63b6ade45bf","Type":"ContainerStarted","Data":"cdc30f79d82e58db6eb1b16527f3a322798aa98f14c5ee41326aa0b4905b6594"} Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.258561 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-xw5xf" event={"ID":"a58094ef-de28-4ae5-84a9-b74684aca52e","Type":"ContainerStarted","Data":"9bf6d9666779e59a21fef3698f78ddafaa3c265c28b2d8fff1ebaadcb3a4f960"} Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.258617 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-xw5xf" event={"ID":"a58094ef-de28-4ae5-84a9-b74684aca52e","Type":"ContainerStarted","Data":"4419b157332b336edead117098cb7385dede0c92be181abac5484cf5adb59467"} Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.285182 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6885ccbc9-jntqm" event={"ID":"afad0966-8385-444b-9eed-8418c0a49b2a","Type":"ContainerStarted","Data":"7f17c88fe39230076ac013b10c5ff4f44f766a32bb39a15570523798fd0cee22"} Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.285239 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6885ccbc9-jntqm" event={"ID":"afad0966-8385-444b-9eed-8418c0a49b2a","Type":"ContainerStarted","Data":"6615326903f6990ecae76e7fe834c4ecc3fe23051337c2d5af2308f338730040"} Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.285721 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.285769 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.309960 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nvbp2" 
event={"ID":"8100fc92-5887-4c6e-b489-6eabd0f88615","Type":"ContainerStarted","Data":"2e181935d93995a5be9b3206c4e64b239dd94de57e9a1f283b8be0c02440f3ff"} Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.341546 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-xw5xf" podStartSLOduration=2.341515011 podStartE2EDuration="2.341515011s" podCreationTimestamp="2026-01-04 12:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:45.276171603 +0000 UTC m=+1360.749201444" watchObservedRunningTime="2026-01-04 12:10:45.341515011 +0000 UTC m=+1360.814544852" Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.385255 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6885ccbc9-jntqm" podStartSLOduration=9.385233123999999 podStartE2EDuration="9.385233124s" podCreationTimestamp="2026-01-04 12:10:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:45.322656038 +0000 UTC m=+1360.795685899" watchObservedRunningTime="2026-01-04 12:10:45.385233124 +0000 UTC m=+1360.858262965" Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.399199 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-8552-account-create-update-69dtz"] Jan 04 12:10:45 crc kubenswrapper[5003]: W0104 12:10:45.423317 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod209bf3b8_aa8b_46cb_b41f_1737c40c7522.slice/crio-2b2dac0e94124b8aaecc363653f2c48509cae4a458e5546639d83bba3320abc6 WatchSource:0}: Error finding container 2b2dac0e94124b8aaecc363653f2c48509cae4a458e5546639d83bba3320abc6: Status 404 returned error can't find the container with id 2b2dac0e94124b8aaecc363653f2c48509cae4a458e5546639d83bba3320abc6 Jan 04 12:10:45 crc kubenswrapper[5003]: W0104 12:10:45.589148 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3939a53e_a630_4d14_b88a_46820430b3f1.slice/crio-c78cfa6282bbdbed650a9975c7951df5b64234edcace0d0c2ce6dd1ca5c1cc91 WatchSource:0}: Error finding container c78cfa6282bbdbed650a9975c7951df5b64234edcace0d0c2ce6dd1ca5c1cc91: Status 404 returned error can't find the container with id c78cfa6282bbdbed650a9975c7951df5b64234edcace0d0c2ce6dd1ca5c1cc91 Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.595470 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 12:10:45 crc kubenswrapper[5003]: I0104 12:10:45.605955 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.336214 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8552-account-create-update-69dtz" event={"ID":"209bf3b8-aa8b-46cb-b41f-1737c40c7522","Type":"ContainerStarted","Data":"209b2744b2890a8d622a4e6eeef6ed901fe9df1453dae9e89661d1909b82f9c9"} Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.336717 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8552-account-create-update-69dtz" event={"ID":"209bf3b8-aa8b-46cb-b41f-1737c40c7522","Type":"ContainerStarted","Data":"2b2dac0e94124b8aaecc363653f2c48509cae4a458e5546639d83bba3320abc6"} Jan 04 12:10:46 crc 
kubenswrapper[5003]: I0104 12:10:46.346893 5003 generic.go:334] "Generic (PLEG): container finished" podID="8100fc92-5887-4c6e-b489-6eabd0f88615" containerID="43613f0b2d9f36694ac1cf9dbb6e8fad1792484c1daf571f046d4122100383cb" exitCode=0 Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.346973 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nvbp2" event={"ID":"8100fc92-5887-4c6e-b489-6eabd0f88615","Type":"ContainerDied","Data":"43613f0b2d9f36694ac1cf9dbb6e8fad1792484c1daf571f046d4122100383cb"} Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.357333 5003 generic.go:334] "Generic (PLEG): container finished" podID="d2d06f5b-83b1-4aea-9ab1-83eecc92dd98" containerID="c550f3bba36226c67dd1137b1b3ff5c808da2e64272379394c767b8103a8b79b" exitCode=0 Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.357462 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f14-account-create-update-zqtvl" event={"ID":"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98","Type":"ContainerDied","Data":"c550f3bba36226c67dd1137b1b3ff5c808da2e64272379394c767b8103a8b79b"} Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.366454 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-8552-account-create-update-69dtz" podStartSLOduration=2.3664256679999998 podStartE2EDuration="2.366425668s" podCreationTimestamp="2026-01-04 12:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:46.357546226 +0000 UTC m=+1361.830576067" watchObservedRunningTime="2026-01-04 12:10:46.366425668 +0000 UTC m=+1361.839455509" Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.378022 5003 generic.go:334] "Generic (PLEG): container finished" podID="8ff01a27-abff-42af-a7dd-f63b6ade45bf" containerID="a804f524ad9a1085cf02a12ec20839c0a9904ba8eea3edaee560b0567459045c" exitCode=0 Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.378115 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-v7b9n" event={"ID":"8ff01a27-abff-42af-a7dd-f63b6ade45bf","Type":"ContainerDied","Data":"a804f524ad9a1085cf02a12ec20839c0a9904ba8eea3edaee560b0567459045c"} Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.395525 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" event={"ID":"54218530-45fd-460c-b755-49b5f28b0f02","Type":"ContainerStarted","Data":"221aee83cc0b9561b233e31890dcf7f9cf62d89ea0a4dba0e02cb11f163d72c5"} Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.395581 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" event={"ID":"54218530-45fd-460c-b755-49b5f28b0f02","Type":"ContainerStarted","Data":"018dec9d3a435cb24bd4b733250964f05446054425bf00df08a57d1e83d102c5"} Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.407373 5003 generic.go:334] "Generic (PLEG): container finished" podID="a58094ef-de28-4ae5-84a9-b74684aca52e" containerID="9bf6d9666779e59a21fef3698f78ddafaa3c265c28b2d8fff1ebaadcb3a4f960" exitCode=0 Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.407473 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-xw5xf" event={"ID":"a58094ef-de28-4ae5-84a9-b74684aca52e","Type":"ContainerDied","Data":"9bf6d9666779e59a21fef3698f78ddafaa3c265c28b2d8fff1ebaadcb3a4f960"} Jan 04 12:10:46 crc kubenswrapper[5003]: 
I0104 12:10:46.422700 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3939a53e-a630-4d14-b88a-46820430b3f1","Type":"ContainerStarted","Data":"c78cfa6282bbdbed650a9975c7951df5b64234edcace0d0c2ce6dd1ca5c1cc91"} Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.518567 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" podStartSLOduration=3.518546753 podStartE2EDuration="3.518546753s" podCreationTimestamp="2026-01-04 12:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:46.503542931 +0000 UTC m=+1361.976572772" watchObservedRunningTime="2026-01-04 12:10:46.518546753 +0000 UTC m=+1361.991576594" Jan 04 12:10:46 crc kubenswrapper[5003]: E0104 12:10:46.525371 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod209bf3b8_aa8b_46cb_b41f_1737c40c7522.slice/crio-209b2744b2890a8d622a4e6eeef6ed901fe9df1453dae9e89661d1909b82f9c9.scope\": RecentStats: unable to find data in memory cache]" Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.991592 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.991911 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerName="glance-log" containerID="cri-o://b03e7d885801f8cce4056f2883bd7034d6edbed6543308c3dd26b0305ab4dfa2" gracePeriod=30 Jan 04 12:10:46 crc kubenswrapper[5003]: I0104 12:10:46.992339 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerName="glance-httpd" containerID="cri-o://7ab74bdd5825ef110386c1d6b4e891b78cb32a067feea2f27d8c553d6f1324ed" gracePeriod=30 Jan 04 12:10:47 crc kubenswrapper[5003]: I0104 12:10:47.433185 5003 generic.go:334] "Generic (PLEG): container finished" podID="54218530-45fd-460c-b755-49b5f28b0f02" containerID="221aee83cc0b9561b233e31890dcf7f9cf62d89ea0a4dba0e02cb11f163d72c5" exitCode=0 Jan 04 12:10:47 crc kubenswrapper[5003]: I0104 12:10:47.433276 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" event={"ID":"54218530-45fd-460c-b755-49b5f28b0f02","Type":"ContainerDied","Data":"221aee83cc0b9561b233e31890dcf7f9cf62d89ea0a4dba0e02cb11f163d72c5"} Jan 04 12:10:47 crc kubenswrapper[5003]: I0104 12:10:47.435942 5003 generic.go:334] "Generic (PLEG): container finished" podID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerID="b03e7d885801f8cce4056f2883bd7034d6edbed6543308c3dd26b0305ab4dfa2" exitCode=143 Jan 04 12:10:47 crc kubenswrapper[5003]: I0104 12:10:47.436005 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"328505b1-7d4b-44ae-a5d1-f77f22f2e79c","Type":"ContainerDied","Data":"b03e7d885801f8cce4056f2883bd7034d6edbed6543308c3dd26b0305ab4dfa2"} Jan 04 12:10:47 crc kubenswrapper[5003]: I0104 12:10:47.439067 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"3939a53e-a630-4d14-b88a-46820430b3f1","Type":"ContainerStarted","Data":"8984051e0a719c9bd4e6c7b21d87b8bd3ba9dfa4946a2bcb97cc2e0414e94b45"} Jan 04 12:10:47 crc kubenswrapper[5003]: I0104 12:10:47.439105 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3939a53e-a630-4d14-b88a-46820430b3f1","Type":"ContainerStarted","Data":"0a1afb1ec3d3e9940d605375623bae0df3fc31bd91e3dcff66aa8ea2851edb4a"} Jan 04 12:10:47 crc kubenswrapper[5003]: I0104 12:10:47.448488 5003 generic.go:334] "Generic (PLEG): container finished" podID="209bf3b8-aa8b-46cb-b41f-1737c40c7522" containerID="209b2744b2890a8d622a4e6eeef6ed901fe9df1453dae9e89661d1909b82f9c9" exitCode=0 Jan 04 12:10:47 crc kubenswrapper[5003]: I0104 12:10:47.448795 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8552-account-create-update-69dtz" event={"ID":"209bf3b8-aa8b-46cb-b41f-1737c40c7522","Type":"ContainerDied","Data":"209b2744b2890a8d622a4e6eeef6ed901fe9df1453dae9e89661d1909b82f9c9"} Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.042378 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f14-account-create-update-zqtvl" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.173367 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-xw5xf" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.177799 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-v7b9n" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.178822 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncdwq\" (UniqueName: \"kubernetes.io/projected/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-kube-api-access-ncdwq\") pod \"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98\" (UID: \"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98\") " Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.179180 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-operator-scripts\") pod \"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98\" (UID: \"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98\") " Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.179692 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d2d06f5b-83b1-4aea-9ab1-83eecc92dd98" (UID: "d2d06f5b-83b1-4aea-9ab1-83eecc92dd98"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.192189 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-kube-api-access-ncdwq" (OuterVolumeSpecName: "kube-api-access-ncdwq") pod "d2d06f5b-83b1-4aea-9ab1-83eecc92dd98" (UID: "d2d06f5b-83b1-4aea-9ab1-83eecc92dd98"). InnerVolumeSpecName "kube-api-access-ncdwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.193065 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-nvbp2" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.280387 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9qjf\" (UniqueName: \"kubernetes.io/projected/8ff01a27-abff-42af-a7dd-f63b6ade45bf-kube-api-access-n9qjf\") pod \"8ff01a27-abff-42af-a7dd-f63b6ade45bf\" (UID: \"8ff01a27-abff-42af-a7dd-f63b6ade45bf\") " Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.280458 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a58094ef-de28-4ae5-84a9-b74684aca52e-operator-scripts\") pod \"a58094ef-de28-4ae5-84a9-b74684aca52e\" (UID: \"a58094ef-de28-4ae5-84a9-b74684aca52e\") " Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.280576 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8ff01a27-abff-42af-a7dd-f63b6ade45bf-operator-scripts\") pod \"8ff01a27-abff-42af-a7dd-f63b6ade45bf\" (UID: \"8ff01a27-abff-42af-a7dd-f63b6ade45bf\") " Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.280726 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8sv8\" (UniqueName: \"kubernetes.io/projected/a58094ef-de28-4ae5-84a9-b74684aca52e-kube-api-access-w8sv8\") pod \"a58094ef-de28-4ae5-84a9-b74684aca52e\" (UID: \"a58094ef-de28-4ae5-84a9-b74684aca52e\") " Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.281272 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.281324 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncdwq\" (UniqueName: \"kubernetes.io/projected/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98-kube-api-access-ncdwq\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.281475 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a58094ef-de28-4ae5-84a9-b74684aca52e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a58094ef-de28-4ae5-84a9-b74684aca52e" (UID: "a58094ef-de28-4ae5-84a9-b74684aca52e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.281999 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ff01a27-abff-42af-a7dd-f63b6ade45bf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8ff01a27-abff-42af-a7dd-f63b6ade45bf" (UID: "8ff01a27-abff-42af-a7dd-f63b6ade45bf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.285347 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ff01a27-abff-42af-a7dd-f63b6ade45bf-kube-api-access-n9qjf" (OuterVolumeSpecName: "kube-api-access-n9qjf") pod "8ff01a27-abff-42af-a7dd-f63b6ade45bf" (UID: "8ff01a27-abff-42af-a7dd-f63b6ade45bf"). InnerVolumeSpecName "kube-api-access-n9qjf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.287292 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a58094ef-de28-4ae5-84a9-b74684aca52e-kube-api-access-w8sv8" (OuterVolumeSpecName: "kube-api-access-w8sv8") pod "a58094ef-de28-4ae5-84a9-b74684aca52e" (UID: "a58094ef-de28-4ae5-84a9-b74684aca52e"). InnerVolumeSpecName "kube-api-access-w8sv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.382391 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8100fc92-5887-4c6e-b489-6eabd0f88615-operator-scripts\") pod \"8100fc92-5887-4c6e-b489-6eabd0f88615\" (UID: \"8100fc92-5887-4c6e-b489-6eabd0f88615\") " Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.382544 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4t4l\" (UniqueName: \"kubernetes.io/projected/8100fc92-5887-4c6e-b489-6eabd0f88615-kube-api-access-r4t4l\") pod \"8100fc92-5887-4c6e-b489-6eabd0f88615\" (UID: \"8100fc92-5887-4c6e-b489-6eabd0f88615\") " Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.382903 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8100fc92-5887-4c6e-b489-6eabd0f88615-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8100fc92-5887-4c6e-b489-6eabd0f88615" (UID: "8100fc92-5887-4c6e-b489-6eabd0f88615"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.384950 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8ff01a27-abff-42af-a7dd-f63b6ade45bf-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.385172 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8sv8\" (UniqueName: \"kubernetes.io/projected/a58094ef-de28-4ae5-84a9-b74684aca52e-kube-api-access-w8sv8\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.385309 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9qjf\" (UniqueName: \"kubernetes.io/projected/8ff01a27-abff-42af-a7dd-f63b6ade45bf-kube-api-access-n9qjf\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.385435 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a58094ef-de28-4ae5-84a9-b74684aca52e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.387238 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8100fc92-5887-4c6e-b489-6eabd0f88615-kube-api-access-r4t4l" (OuterVolumeSpecName: "kube-api-access-r4t4l") pod "8100fc92-5887-4c6e-b489-6eabd0f88615" (UID: "8100fc92-5887-4c6e-b489-6eabd0f88615"). InnerVolumeSpecName "kube-api-access-r4t4l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.459566 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-v7b9n" event={"ID":"8ff01a27-abff-42af-a7dd-f63b6ade45bf","Type":"ContainerDied","Data":"cdc30f79d82e58db6eb1b16527f3a322798aa98f14c5ee41326aa0b4905b6594"} Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.459615 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdc30f79d82e58db6eb1b16527f3a322798aa98f14c5ee41326aa0b4905b6594" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.459686 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-v7b9n" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.465292 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-xw5xf" event={"ID":"a58094ef-de28-4ae5-84a9-b74684aca52e","Type":"ContainerDied","Data":"4419b157332b336edead117098cb7385dede0c92be181abac5484cf5adb59467"} Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.465341 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4419b157332b336edead117098cb7385dede0c92be181abac5484cf5adb59467" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.465690 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-xw5xf" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.468808 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3939a53e-a630-4d14-b88a-46820430b3f1","Type":"ContainerStarted","Data":"235252d74ea799b01465f1e4ad56fdef5abf83b9cf2186237632fac1469986ce"} Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.471206 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-nvbp2" event={"ID":"8100fc92-5887-4c6e-b489-6eabd0f88615","Type":"ContainerDied","Data":"2e181935d93995a5be9b3206c4e64b239dd94de57e9a1f283b8be0c02440f3ff"} Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.471284 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e181935d93995a5be9b3206c4e64b239dd94de57e9a1f283b8be0c02440f3ff" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.471226 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-nvbp2" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.473422 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f14-account-create-update-zqtvl" event={"ID":"d2d06f5b-83b1-4aea-9ab1-83eecc92dd98","Type":"ContainerDied","Data":"dd989c698aa03c1080fb189f1ad32e9ca3fb0e0eb001edabcef6607600fccb9e"} Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.473476 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd989c698aa03c1080fb189f1ad32e9ca3fb0e0eb001edabcef6607600fccb9e" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.473537 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5f14-account-create-update-zqtvl" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.486923 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4t4l\" (UniqueName: \"kubernetes.io/projected/8100fc92-5887-4c6e-b489-6eabd0f88615-kube-api-access-r4t4l\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.486953 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8100fc92-5887-4c6e-b489-6eabd0f88615-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.959561 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8552-account-create-update-69dtz" Jan 04 12:10:48 crc kubenswrapper[5003]: I0104 12:10:48.965037 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.108968 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/209bf3b8-aa8b-46cb-b41f-1737c40c7522-operator-scripts\") pod \"209bf3b8-aa8b-46cb-b41f-1737c40c7522\" (UID: \"209bf3b8-aa8b-46cb-b41f-1737c40c7522\") " Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.109034 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54218530-45fd-460c-b755-49b5f28b0f02-operator-scripts\") pod \"54218530-45fd-460c-b755-49b5f28b0f02\" (UID: \"54218530-45fd-460c-b755-49b5f28b0f02\") " Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.109106 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27ncx\" (UniqueName: \"kubernetes.io/projected/209bf3b8-aa8b-46cb-b41f-1737c40c7522-kube-api-access-27ncx\") pod \"209bf3b8-aa8b-46cb-b41f-1737c40c7522\" (UID: \"209bf3b8-aa8b-46cb-b41f-1737c40c7522\") " Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.109141 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hs625\" (UniqueName: \"kubernetes.io/projected/54218530-45fd-460c-b755-49b5f28b0f02-kube-api-access-hs625\") pod \"54218530-45fd-460c-b755-49b5f28b0f02\" (UID: \"54218530-45fd-460c-b755-49b5f28b0f02\") " Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.110004 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54218530-45fd-460c-b755-49b5f28b0f02-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "54218530-45fd-460c-b755-49b5f28b0f02" (UID: "54218530-45fd-460c-b755-49b5f28b0f02"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.110046 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/209bf3b8-aa8b-46cb-b41f-1737c40c7522-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "209bf3b8-aa8b-46cb-b41f-1737c40c7522" (UID: "209bf3b8-aa8b-46cb-b41f-1737c40c7522"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.115693 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/209bf3b8-aa8b-46cb-b41f-1737c40c7522-kube-api-access-27ncx" (OuterVolumeSpecName: "kube-api-access-27ncx") pod "209bf3b8-aa8b-46cb-b41f-1737c40c7522" (UID: "209bf3b8-aa8b-46cb-b41f-1737c40c7522"). InnerVolumeSpecName "kube-api-access-27ncx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.118133 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54218530-45fd-460c-b755-49b5f28b0f02-kube-api-access-hs625" (OuterVolumeSpecName: "kube-api-access-hs625") pod "54218530-45fd-460c-b755-49b5f28b0f02" (UID: "54218530-45fd-460c-b755-49b5f28b0f02"). InnerVolumeSpecName "kube-api-access-hs625". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.213273 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/209bf3b8-aa8b-46cb-b41f-1737c40c7522-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.213333 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54218530-45fd-460c-b755-49b5f28b0f02-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.213347 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27ncx\" (UniqueName: \"kubernetes.io/projected/209bf3b8-aa8b-46cb-b41f-1737c40c7522-kube-api-access-27ncx\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.213364 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hs625\" (UniqueName: \"kubernetes.io/projected/54218530-45fd-460c-b755-49b5f28b0f02-kube-api-access-hs625\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.484171 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-8552-account-create-update-69dtz" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.484173 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-8552-account-create-update-69dtz" event={"ID":"209bf3b8-aa8b-46cb-b41f-1737c40c7522","Type":"ContainerDied","Data":"2b2dac0e94124b8aaecc363653f2c48509cae4a458e5546639d83bba3320abc6"} Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.484266 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b2dac0e94124b8aaecc363653f2c48509cae4a458e5546639d83bba3320abc6" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.487463 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" event={"ID":"54218530-45fd-460c-b755-49b5f28b0f02","Type":"ContainerDied","Data":"018dec9d3a435cb24bd4b733250964f05446054425bf00df08a57d1e83d102c5"} Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.487512 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-ccc2-account-create-update-ftb5c" Jan 04 12:10:49 crc kubenswrapper[5003]: I0104 12:10:49.487517 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="018dec9d3a435cb24bd4b733250964f05446054425bf00df08a57d1e83d102c5" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.142061 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.151:9292/healthcheck\": read tcp 10.217.0.2:39566->10.217.0.151:9292: read: connection reset by peer" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.142117 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.151:9292/healthcheck\": read tcp 10.217.0.2:39556->10.217.0.151:9292: read: connection reset by peer" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.262900 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.268511 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="31a5af52-3095-473b-96cd-c3531d5569bb" containerName="glance-log" containerID="cri-o://6ab8102445d68e771f1e3110b6fed506d60e8e6a9bf0cef1b20b4a966904c008" gracePeriod=30 Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.268605 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="31a5af52-3095-473b-96cd-c3531d5569bb" containerName="glance-httpd" containerID="cri-o://d716c1374868df135ac92dd8a73e7788bc27a6e38d3942e86c10ad154a2bf75e" gracePeriod=30 Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.507474 5003 generic.go:334] "Generic (PLEG): container finished" podID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerID="7ab74bdd5825ef110386c1d6b4e891b78cb32a067feea2f27d8c553d6f1324ed" exitCode=0 Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.507562 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"328505b1-7d4b-44ae-a5d1-f77f22f2e79c","Type":"ContainerDied","Data":"7ab74bdd5825ef110386c1d6b4e891b78cb32a067feea2f27d8c553d6f1324ed"} Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.509914 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3939a53e-a630-4d14-b88a-46820430b3f1","Type":"ContainerStarted","Data":"9e1a1ef8b56d66c8f87b643c975655ac429c659dc9d0cbae3f6ff63f88cfc22a"} Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.510158 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.512785 5003 generic.go:334] "Generic (PLEG): container finished" podID="31a5af52-3095-473b-96cd-c3531d5569bb" containerID="6ab8102445d68e771f1e3110b6fed506d60e8e6a9bf0cef1b20b4a966904c008" exitCode=143 Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.512823 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"31a5af52-3095-473b-96cd-c3531d5569bb","Type":"ContainerDied","Data":"6ab8102445d68e771f1e3110b6fed506d60e8e6a9bf0cef1b20b4a966904c008"} Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.541148 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.85933133 podStartE2EDuration="6.541125945s" podCreationTimestamp="2026-01-04 12:10:44 +0000 UTC" firstStartedPulling="2026-01-04 12:10:45.595257103 +0000 UTC m=+1361.068286944" lastFinishedPulling="2026-01-04 12:10:49.277051718 +0000 UTC m=+1364.750081559" observedRunningTime="2026-01-04 12:10:50.53558202 +0000 UTC m=+1366.008611871" watchObservedRunningTime="2026-01-04 12:10:50.541125945 +0000 UTC m=+1366.014155786" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.792054 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.946545 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-config-data\") pod \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.947003 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-httpd-run\") pod \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.947132 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-combined-ca-bundle\") pod \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.947175 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhbr7\" (UniqueName: \"kubernetes.io/projected/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-kube-api-access-vhbr7\") pod \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.947243 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-public-tls-certs\") pod \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.947277 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-logs\") pod \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.947365 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.947450 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-scripts\") pod \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\" (UID: \"328505b1-7d4b-44ae-a5d1-f77f22f2e79c\") " Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.947509 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "328505b1-7d4b-44ae-a5d1-f77f22f2e79c" (UID: "328505b1-7d4b-44ae-a5d1-f77f22f2e79c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.947807 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-logs" (OuterVolumeSpecName: "logs") pod "328505b1-7d4b-44ae-a5d1-f77f22f2e79c" (UID: "328505b1-7d4b-44ae-a5d1-f77f22f2e79c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.948212 5003 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.948237 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.952961 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "328505b1-7d4b-44ae-a5d1-f77f22f2e79c" (UID: "328505b1-7d4b-44ae-a5d1-f77f22f2e79c"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.953191 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-kube-api-access-vhbr7" (OuterVolumeSpecName: "kube-api-access-vhbr7") pod "328505b1-7d4b-44ae-a5d1-f77f22f2e79c" (UID: "328505b1-7d4b-44ae-a5d1-f77f22f2e79c"). InnerVolumeSpecName "kube-api-access-vhbr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.954639 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-scripts" (OuterVolumeSpecName: "scripts") pod "328505b1-7d4b-44ae-a5d1-f77f22f2e79c" (UID: "328505b1-7d4b-44ae-a5d1-f77f22f2e79c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:50 crc kubenswrapper[5003]: I0104 12:10:50.988180 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "328505b1-7d4b-44ae-a5d1-f77f22f2e79c" (UID: "328505b1-7d4b-44ae-a5d1-f77f22f2e79c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.004418 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "328505b1-7d4b-44ae-a5d1-f77f22f2e79c" (UID: "328505b1-7d4b-44ae-a5d1-f77f22f2e79c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.006632 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-config-data" (OuterVolumeSpecName: "config-data") pod "328505b1-7d4b-44ae-a5d1-f77f22f2e79c" (UID: "328505b1-7d4b-44ae-a5d1-f77f22f2e79c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.050261 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.050302 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.050337 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.050351 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhbr7\" (UniqueName: \"kubernetes.io/projected/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-kube-api-access-vhbr7\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.050361 5003 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/328505b1-7d4b-44ae-a5d1-f77f22f2e79c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.050396 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.072638 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.152472 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.524174 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"328505b1-7d4b-44ae-a5d1-f77f22f2e79c","Type":"ContainerDied","Data":"1d8c5aad56d71c8fbbfcbdce0249adf35a6b858031352c9e96b1b3fb433bdd24"} Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.524225 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.524238 5003 scope.go:117] "RemoveContainer" containerID="7ab74bdd5825ef110386c1d6b4e891b78cb32a067feea2f27d8c553d6f1324ed" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.556427 5003 scope.go:117] "RemoveContainer" containerID="b03e7d885801f8cce4056f2883bd7034d6edbed6543308c3dd26b0305ab4dfa2" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.575188 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.586658 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594305 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:51 crc kubenswrapper[5003]: E0104 12:10:51.594671 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a58094ef-de28-4ae5-84a9-b74684aca52e" containerName="mariadb-database-create" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594687 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a58094ef-de28-4ae5-84a9-b74684aca52e" containerName="mariadb-database-create" Jan 04 12:10:51 crc kubenswrapper[5003]: E0104 12:10:51.594702 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8100fc92-5887-4c6e-b489-6eabd0f88615" containerName="mariadb-database-create" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594711 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8100fc92-5887-4c6e-b489-6eabd0f88615" containerName="mariadb-database-create" Jan 04 12:10:51 crc kubenswrapper[5003]: E0104 12:10:51.594722 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff01a27-abff-42af-a7dd-f63b6ade45bf" containerName="mariadb-database-create" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594728 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff01a27-abff-42af-a7dd-f63b6ade45bf" containerName="mariadb-database-create" Jan 04 12:10:51 crc kubenswrapper[5003]: E0104 12:10:51.594736 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerName="glance-httpd" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594742 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerName="glance-httpd" Jan 04 12:10:51 crc kubenswrapper[5003]: E0104 12:10:51.594751 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2d06f5b-83b1-4aea-9ab1-83eecc92dd98" containerName="mariadb-account-create-update" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594759 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2d06f5b-83b1-4aea-9ab1-83eecc92dd98" containerName="mariadb-account-create-update" Jan 04 12:10:51 crc kubenswrapper[5003]: E0104 12:10:51.594773 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="209bf3b8-aa8b-46cb-b41f-1737c40c7522" containerName="mariadb-account-create-update" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594780 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="209bf3b8-aa8b-46cb-b41f-1737c40c7522" containerName="mariadb-account-create-update" Jan 04 12:10:51 crc kubenswrapper[5003]: E0104 12:10:51.594798 5003 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="54218530-45fd-460c-b755-49b5f28b0f02" containerName="mariadb-account-create-update" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594804 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="54218530-45fd-460c-b755-49b5f28b0f02" containerName="mariadb-account-create-update" Jan 04 12:10:51 crc kubenswrapper[5003]: E0104 12:10:51.594812 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerName="glance-log" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594818 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerName="glance-log" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594974 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerName="glance-log" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594988 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="209bf3b8-aa8b-46cb-b41f-1737c40c7522" containerName="mariadb-account-create-update" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.594999 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="54218530-45fd-460c-b755-49b5f28b0f02" containerName="mariadb-account-create-update" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.595005 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8100fc92-5887-4c6e-b489-6eabd0f88615" containerName="mariadb-database-create" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.595034 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a58094ef-de28-4ae5-84a9-b74684aca52e" containerName="mariadb-database-create" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.595041 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ff01a27-abff-42af-a7dd-f63b6ade45bf" containerName="mariadb-database-create" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.595049 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2d06f5b-83b1-4aea-9ab1-83eecc92dd98" containerName="mariadb-account-create-update" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.595060 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" containerName="glance-httpd" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.595895 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.605143 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.608670 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.612182 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.662388 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.662455 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-scripts\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.662491 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.662530 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-logs\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.662544 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.662580 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-config-data\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.662617 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.662682 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-hlmrv\" (UniqueName: \"kubernetes.io/projected/714823a9-560a-496c-b975-2db1099ad873-kube-api-access-hlmrv\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.764197 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlmrv\" (UniqueName: \"kubernetes.io/projected/714823a9-560a-496c-b975-2db1099ad873-kube-api-access-hlmrv\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.764291 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.764326 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-scripts\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.765327 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.765453 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.769395 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-logs\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.769426 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.769926 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-logs\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.769985 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.778989 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-scripts\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.781710 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.782417 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-config-data\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.787798 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlmrv\" (UniqueName: \"kubernetes.io/projected/714823a9-560a-496c-b975-2db1099ad873-kube-api-access-hlmrv\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.788763 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-config-data\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.801611 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.805191 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.818862 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-external-api-0\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " pod="openstack/glance-default-external-api-0" Jan 04 12:10:51 crc kubenswrapper[5003]: I0104 12:10:51.914170 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:10:52 crc kubenswrapper[5003]: I0104 12:10:52.129675 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:52 crc kubenswrapper[5003]: I0104 12:10:52.131683 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:52 crc kubenswrapper[5003]: I0104 12:10:52.133528 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:10:52 crc kubenswrapper[5003]: I0104 12:10:52.551890 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="ceilometer-central-agent" containerID="cri-o://0a1afb1ec3d3e9940d605375623bae0df3fc31bd91e3dcff66aa8ea2851edb4a" gracePeriod=30 Jan 04 12:10:52 crc kubenswrapper[5003]: I0104 12:10:52.552410 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="proxy-httpd" containerID="cri-o://9e1a1ef8b56d66c8f87b643c975655ac429c659dc9d0cbae3f6ff63f88cfc22a" gracePeriod=30 Jan 04 12:10:52 crc kubenswrapper[5003]: I0104 12:10:52.552483 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="sg-core" containerID="cri-o://235252d74ea799b01465f1e4ad56fdef5abf83b9cf2186237632fac1469986ce" gracePeriod=30 Jan 04 12:10:52 crc kubenswrapper[5003]: I0104 12:10:52.552584 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="ceilometer-notification-agent" containerID="cri-o://8984051e0a719c9bd4e6c7b21d87b8bd3ba9dfa4946a2bcb97cc2e0414e94b45" gracePeriod=30 Jan 04 12:10:52 crc kubenswrapper[5003]: I0104 12:10:52.693325 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:10:52 crc kubenswrapper[5003]: I0104 12:10:52.826399 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="328505b1-7d4b-44ae-a5d1-f77f22f2e79c" path="/var/lib/kubelet/pods/328505b1-7d4b-44ae-a5d1-f77f22f2e79c/volumes" Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.433556 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="31a5af52-3095-473b-96cd-c3531d5569bb" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.155:9292/healthcheck\": read tcp 10.217.0.2:44478->10.217.0.155:9292: read: connection reset by peer" Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.433573 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="31a5af52-3095-473b-96cd-c3531d5569bb" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.155:9292/healthcheck\": read tcp 10.217.0.2:44476->10.217.0.155:9292: read: connection reset by peer" Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.560697 5003 generic.go:334] "Generic (PLEG): container finished" podID="3939a53e-a630-4d14-b88a-46820430b3f1" containerID="9e1a1ef8b56d66c8f87b643c975655ac429c659dc9d0cbae3f6ff63f88cfc22a" exitCode=0 Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.560740 5003 generic.go:334] "Generic (PLEG): container 
finished" podID="3939a53e-a630-4d14-b88a-46820430b3f1" containerID="235252d74ea799b01465f1e4ad56fdef5abf83b9cf2186237632fac1469986ce" exitCode=2 Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.560749 5003 generic.go:334] "Generic (PLEG): container finished" podID="3939a53e-a630-4d14-b88a-46820430b3f1" containerID="8984051e0a719c9bd4e6c7b21d87b8bd3ba9dfa4946a2bcb97cc2e0414e94b45" exitCode=0 Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.560756 5003 generic.go:334] "Generic (PLEG): container finished" podID="3939a53e-a630-4d14-b88a-46820430b3f1" containerID="0a1afb1ec3d3e9940d605375623bae0df3fc31bd91e3dcff66aa8ea2851edb4a" exitCode=0 Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.560769 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3939a53e-a630-4d14-b88a-46820430b3f1","Type":"ContainerDied","Data":"9e1a1ef8b56d66c8f87b643c975655ac429c659dc9d0cbae3f6ff63f88cfc22a"} Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.560824 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3939a53e-a630-4d14-b88a-46820430b3f1","Type":"ContainerDied","Data":"235252d74ea799b01465f1e4ad56fdef5abf83b9cf2186237632fac1469986ce"} Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.560837 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3939a53e-a630-4d14-b88a-46820430b3f1","Type":"ContainerDied","Data":"8984051e0a719c9bd4e6c7b21d87b8bd3ba9dfa4946a2bcb97cc2e0414e94b45"} Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.560849 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3939a53e-a630-4d14-b88a-46820430b3f1","Type":"ContainerDied","Data":"0a1afb1ec3d3e9940d605375623bae0df3fc31bd91e3dcff66aa8ea2851edb4a"} Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.561886 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"714823a9-560a-496c-b975-2db1099ad873","Type":"ContainerStarted","Data":"02447dca870462f4df40064d89c32f3a3d466ae10125a4bb87a31ddc1ecd1f9e"} Jan 04 12:10:53 crc kubenswrapper[5003]: I0104 12:10:53.561907 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"714823a9-560a-496c-b975-2db1099ad873","Type":"ContainerStarted","Data":"5079538a30b9ac5c3493bac521155c91f7dc65296908f66d20dc4e686b909028"} Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.506615 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-7z7gd"] Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.508943 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.512645 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-lh872" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.512860 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.513368 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.521351 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-7z7gd"] Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.584741 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrzvs\" (UniqueName: \"kubernetes.io/projected/4689f11c-e91e-4045-ae29-5377e6b8ae6e-kube-api-access-jrzvs\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.584817 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.584884 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-scripts\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.584953 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-config-data\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.687215 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.687313 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-scripts\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.687370 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-config-data\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: 
\"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.687458 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrzvs\" (UniqueName: \"kubernetes.io/projected/4689f11c-e91e-4045-ae29-5377e6b8ae6e-kube-api-access-jrzvs\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.693537 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.694252 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-config-data\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.700007 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-scripts\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.706508 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrzvs\" (UniqueName: \"kubernetes.io/projected/4689f11c-e91e-4045-ae29-5377e6b8ae6e-kube-api-access-jrzvs\") pod \"nova-cell0-conductor-db-sync-7z7gd\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:54 crc kubenswrapper[5003]: I0104 12:10:54.842935 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.583386 5003 generic.go:334] "Generic (PLEG): container finished" podID="31a5af52-3095-473b-96cd-c3531d5569bb" containerID="d716c1374868df135ac92dd8a73e7788bc27a6e38d3942e86c10ad154a2bf75e" exitCode=0 Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.583488 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"31a5af52-3095-473b-96cd-c3531d5569bb","Type":"ContainerDied","Data":"d716c1374868df135ac92dd8a73e7788bc27a6e38d3942e86c10ad154a2bf75e"} Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.703888 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.771037 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-7z7gd"] Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.819466 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-combined-ca-bundle\") pod \"3939a53e-a630-4d14-b88a-46820430b3f1\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.819593 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-run-httpd\") pod \"3939a53e-a630-4d14-b88a-46820430b3f1\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.819704 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-log-httpd\") pod \"3939a53e-a630-4d14-b88a-46820430b3f1\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.819815 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-sg-core-conf-yaml\") pod \"3939a53e-a630-4d14-b88a-46820430b3f1\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.819877 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-scripts\") pod \"3939a53e-a630-4d14-b88a-46820430b3f1\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.819952 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-config-data\") pod \"3939a53e-a630-4d14-b88a-46820430b3f1\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.820023 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qd4sf\" (UniqueName: \"kubernetes.io/projected/3939a53e-a630-4d14-b88a-46820430b3f1-kube-api-access-qd4sf\") pod \"3939a53e-a630-4d14-b88a-46820430b3f1\" (UID: \"3939a53e-a630-4d14-b88a-46820430b3f1\") " Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.825179 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3939a53e-a630-4d14-b88a-46820430b3f1" (UID: "3939a53e-a630-4d14-b88a-46820430b3f1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.826061 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3939a53e-a630-4d14-b88a-46820430b3f1" (UID: "3939a53e-a630-4d14-b88a-46820430b3f1"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.830625 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3939a53e-a630-4d14-b88a-46820430b3f1-kube-api-access-qd4sf" (OuterVolumeSpecName: "kube-api-access-qd4sf") pod "3939a53e-a630-4d14-b88a-46820430b3f1" (UID: "3939a53e-a630-4d14-b88a-46820430b3f1"). InnerVolumeSpecName "kube-api-access-qd4sf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.831169 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-scripts" (OuterVolumeSpecName: "scripts") pod "3939a53e-a630-4d14-b88a-46820430b3f1" (UID: "3939a53e-a630-4d14-b88a-46820430b3f1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.898621 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3939a53e-a630-4d14-b88a-46820430b3f1" (UID: "3939a53e-a630-4d14-b88a-46820430b3f1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.923451 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.923493 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qd4sf\" (UniqueName: \"kubernetes.io/projected/3939a53e-a630-4d14-b88a-46820430b3f1-kube-api-access-qd4sf\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.923510 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.923522 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3939a53e-a630-4d14-b88a-46820430b3f1-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.923533 5003 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.937226 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3939a53e-a630-4d14-b88a-46820430b3f1" (UID: "3939a53e-a630-4d14-b88a-46820430b3f1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.965305 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-config-data" (OuterVolumeSpecName: "config-data") pod "3939a53e-a630-4d14-b88a-46820430b3f1" (UID: "3939a53e-a630-4d14-b88a-46820430b3f1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:55 crc kubenswrapper[5003]: I0104 12:10:55.986433 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.024816 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.024859 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3939a53e-a630-4d14-b88a-46820430b3f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.126084 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgtjc\" (UniqueName: \"kubernetes.io/projected/31a5af52-3095-473b-96cd-c3531d5569bb-kube-api-access-mgtjc\") pod \"31a5af52-3095-473b-96cd-c3531d5569bb\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.126133 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"31a5af52-3095-473b-96cd-c3531d5569bb\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.126288 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-config-data\") pod \"31a5af52-3095-473b-96cd-c3531d5569bb\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.126317 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-httpd-run\") pod \"31a5af52-3095-473b-96cd-c3531d5569bb\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.126414 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-internal-tls-certs\") pod \"31a5af52-3095-473b-96cd-c3531d5569bb\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.126437 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-scripts\") pod \"31a5af52-3095-473b-96cd-c3531d5569bb\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.126463 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-logs\") pod \"31a5af52-3095-473b-96cd-c3531d5569bb\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.126493 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-combined-ca-bundle\") pod \"31a5af52-3095-473b-96cd-c3531d5569bb\" (UID: \"31a5af52-3095-473b-96cd-c3531d5569bb\") " Jan 04 12:10:56 
crc kubenswrapper[5003]: I0104 12:10:56.129550 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-logs" (OuterVolumeSpecName: "logs") pod "31a5af52-3095-473b-96cd-c3531d5569bb" (UID: "31a5af52-3095-473b-96cd-c3531d5569bb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.129717 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "31a5af52-3095-473b-96cd-c3531d5569bb" (UID: "31a5af52-3095-473b-96cd-c3531d5569bb"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.129874 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "31a5af52-3095-473b-96cd-c3531d5569bb" (UID: "31a5af52-3095-473b-96cd-c3531d5569bb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.131838 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-scripts" (OuterVolumeSpecName: "scripts") pod "31a5af52-3095-473b-96cd-c3531d5569bb" (UID: "31a5af52-3095-473b-96cd-c3531d5569bb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.145492 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31a5af52-3095-473b-96cd-c3531d5569bb-kube-api-access-mgtjc" (OuterVolumeSpecName: "kube-api-access-mgtjc") pod "31a5af52-3095-473b-96cd-c3531d5569bb" (UID: "31a5af52-3095-473b-96cd-c3531d5569bb"). InnerVolumeSpecName "kube-api-access-mgtjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.152958 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31a5af52-3095-473b-96cd-c3531d5569bb" (UID: "31a5af52-3095-473b-96cd-c3531d5569bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.192810 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "31a5af52-3095-473b-96cd-c3531d5569bb" (UID: "31a5af52-3095-473b-96cd-c3531d5569bb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.197194 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-config-data" (OuterVolumeSpecName: "config-data") pod "31a5af52-3095-473b-96cd-c3531d5569bb" (UID: "31a5af52-3095-473b-96cd-c3531d5569bb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.231963 5003 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.232007 5003 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.232040 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.232051 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31a5af52-3095-473b-96cd-c3531d5569bb-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.232062 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.232072 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgtjc\" (UniqueName: \"kubernetes.io/projected/31a5af52-3095-473b-96cd-c3531d5569bb-kube-api-access-mgtjc\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.232112 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.232123 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a5af52-3095-473b-96cd-c3531d5569bb-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.250137 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.334059 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.603760 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"31a5af52-3095-473b-96cd-c3531d5569bb","Type":"ContainerDied","Data":"49453d0030b3d98de1edc94ac0b653b6a1c772ee3690bc20b808b58d02591ca9"} Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.604324 5003 scope.go:117] "RemoveContainer" containerID="d716c1374868df135ac92dd8a73e7788bc27a6e38d3942e86c10ad154a2bf75e" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.604108 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.606834 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"714823a9-560a-496c-b975-2db1099ad873","Type":"ContainerStarted","Data":"5b23433759eb714853942994a37054e568cfd11f999d8e6d3f6f86c33c3787c4"} Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.607998 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-7z7gd" event={"ID":"4689f11c-e91e-4045-ae29-5377e6b8ae6e","Type":"ContainerStarted","Data":"bd637d0bcc4ae556d6d2470c852f53dd91c6fc516b6506c2b3938b8976598afb"} Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.637640 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"3939a53e-a630-4d14-b88a-46820430b3f1","Type":"ContainerDied","Data":"c78cfa6282bbdbed650a9975c7951df5b64234edcace0d0c2ce6dd1ca5c1cc91"} Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.637816 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.640703 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.640657758 podStartE2EDuration="5.640657758s" podCreationTimestamp="2026-01-04 12:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:56.629714052 +0000 UTC m=+1372.102743903" watchObservedRunningTime="2026-01-04 12:10:56.640657758 +0000 UTC m=+1372.113687609" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.688851 5003 scope.go:117] "RemoveContainer" containerID="6ab8102445d68e771f1e3110b6fed506d60e8e6a9bf0cef1b20b4a966904c008" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.716262 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.727129 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.738269 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.745339 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757167 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:56 crc kubenswrapper[5003]: E0104 12:10:56.757588 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="sg-core" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757605 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="sg-core" Jan 04 12:10:56 crc kubenswrapper[5003]: E0104 12:10:56.757628 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="ceilometer-central-agent" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757635 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="ceilometer-central-agent" Jan 04 12:10:56 crc kubenswrapper[5003]: E0104 
12:10:56.757648 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="ceilometer-notification-agent" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757655 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="ceilometer-notification-agent" Jan 04 12:10:56 crc kubenswrapper[5003]: E0104 12:10:56.757674 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="proxy-httpd" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757679 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="proxy-httpd" Jan 04 12:10:56 crc kubenswrapper[5003]: E0104 12:10:56.757692 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31a5af52-3095-473b-96cd-c3531d5569bb" containerName="glance-httpd" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757698 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="31a5af52-3095-473b-96cd-c3531d5569bb" containerName="glance-httpd" Jan 04 12:10:56 crc kubenswrapper[5003]: E0104 12:10:56.757709 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31a5af52-3095-473b-96cd-c3531d5569bb" containerName="glance-log" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757715 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="31a5af52-3095-473b-96cd-c3531d5569bb" containerName="glance-log" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757899 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="31a5af52-3095-473b-96cd-c3531d5569bb" containerName="glance-log" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757910 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="proxy-httpd" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757921 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="ceilometer-central-agent" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757931 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="ceilometer-notification-agent" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757941 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="31a5af52-3095-473b-96cd-c3531d5569bb" containerName="glance-httpd" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.757958 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" containerName="sg-core" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.780294 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.780422 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.787455 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.787652 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.798290 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.799833 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.801459 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.801685 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.841615 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31a5af52-3095-473b-96cd-c3531d5569bb" path="/var/lib/kubelet/pods/31a5af52-3095-473b-96cd-c3531d5569bb/volumes" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.842509 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3939a53e-a630-4d14-b88a-46820430b3f1" path="/var/lib/kubelet/pods/3939a53e-a630-4d14-b88a-46820430b3f1/volumes" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.844154 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-config-data\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.844242 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r86l\" (UniqueName: \"kubernetes.io/projected/8e51a292-b57d-4dc8-bca6-65ff67198a27-kube-api-access-8r86l\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.844273 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.844295 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.844325 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-scripts\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.844356 5003 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-run-httpd\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.844383 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-log-httpd\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.845046 5003 scope.go:117] "RemoveContainer" containerID="9e1a1ef8b56d66c8f87b643c975655ac429c659dc9d0cbae3f6ff63f88cfc22a" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.846416 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.882369 5003 scope.go:117] "RemoveContainer" containerID="235252d74ea799b01465f1e4ad56fdef5abf83b9cf2186237632fac1469986ce" Jan 04 12:10:56 crc kubenswrapper[5003]: E0104 12:10:56.930193 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31a5af52_3095_473b_96cd_c3531d5569bb.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31a5af52_3095_473b_96cd_c3531d5569bb.slice/crio-49453d0030b3d98de1edc94ac0b653b6a1c772ee3690bc20b808b58d02591ca9\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3939a53e_a630_4d14_b88a_46820430b3f1.slice\": RecentStats: unable to find data in memory cache]" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.938404 5003 scope.go:117] "RemoveContainer" containerID="8984051e0a719c9bd4e6c7b21d87b8bd3ba9dfa4946a2bcb97cc2e0414e94b45" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.945905 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-logs\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.945948 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946042 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946130 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946209 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-config-data\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946273 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946335 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r86l\" (UniqueName: \"kubernetes.io/projected/8e51a292-b57d-4dc8-bca6-65ff67198a27-kube-api-access-8r86l\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946371 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946395 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946424 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946450 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-scripts\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946478 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-run-httpd\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946506 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:56 
crc kubenswrapper[5003]: I0104 12:10:56.946548 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdnk9\" (UniqueName: \"kubernetes.io/projected/43c1199f-e162-4062-a972-417afa58eaa6-kube-api-access-qdnk9\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.946575 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-log-httpd\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.947030 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-log-httpd\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.947288 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-run-httpd\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.953208 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.955790 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-scripts\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.955955 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.958096 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-config-data\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:56 crc kubenswrapper[5003]: I0104 12:10:56.967949 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r86l\" (UniqueName: \"kubernetes.io/projected/8e51a292-b57d-4dc8-bca6-65ff67198a27-kube-api-access-8r86l\") pod \"ceilometer-0\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " pod="openstack/ceilometer-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.048513 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 
12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.048573 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.048592 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdnk9\" (UniqueName: \"kubernetes.io/projected/43c1199f-e162-4062-a972-417afa58eaa6-kube-api-access-qdnk9\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.048632 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-logs\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.048653 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.048689 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.048727 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.048784 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.049577 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.050511 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-logs\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.050541 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.057869 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.058071 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.058273 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.063874 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.067310 5003 scope.go:117] "RemoveContainer" containerID="0a1afb1ec3d3e9940d605375623bae0df3fc31bd91e3dcff66aa8ea2851edb4a" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.096752 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdnk9\" (UniqueName: \"kubernetes.io/projected/43c1199f-e162-4062-a972-417afa58eaa6-kube-api-access-qdnk9\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.111462 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.123747 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.161006 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.702499 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:57 crc kubenswrapper[5003]: I0104 12:10:57.931138 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:10:57 crc kubenswrapper[5003]: W0104 12:10:57.934484 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43c1199f_e162_4062_a972_417afa58eaa6.slice/crio-ab44f32cf572bf8c1e8ffc367ae849e6c8acdc85910460ec964a7aad847487a0 WatchSource:0}: Error finding container ab44f32cf572bf8c1e8ffc367ae849e6c8acdc85910460ec964a7aad847487a0: Status 404 returned error can't find the container with id ab44f32cf572bf8c1e8ffc367ae849e6c8acdc85910460ec964a7aad847487a0 Jan 04 12:10:58 crc kubenswrapper[5003]: I0104 12:10:58.710753 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"43c1199f-e162-4062-a972-417afa58eaa6","Type":"ContainerStarted","Data":"2faff7fc607a82782ff7622c7373fabbd6ab09171776a5361884986f16ec6df5"} Jan 04 12:10:58 crc kubenswrapper[5003]: I0104 12:10:58.711512 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"43c1199f-e162-4062-a972-417afa58eaa6","Type":"ContainerStarted","Data":"ab44f32cf572bf8c1e8ffc367ae849e6c8acdc85910460ec964a7aad847487a0"} Jan 04 12:10:58 crc kubenswrapper[5003]: I0104 12:10:58.727360 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e51a292-b57d-4dc8-bca6-65ff67198a27","Type":"ContainerStarted","Data":"d18e34024e2cc26a8c19bd692f402ce02356e8651a1d5e2fcd08cebad31487e0"} Jan 04 12:10:58 crc kubenswrapper[5003]: I0104 12:10:58.727425 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e51a292-b57d-4dc8-bca6-65ff67198a27","Type":"ContainerStarted","Data":"50089b424f4ec1dec86a03457813d4d56c8e800fd2b0810a8738fe0f8955cdd5"} Jan 04 12:10:58 crc kubenswrapper[5003]: I0104 12:10:58.738050 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:10:59 crc kubenswrapper[5003]: I0104 12:10:59.746442 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"43c1199f-e162-4062-a972-417afa58eaa6","Type":"ContainerStarted","Data":"193d03f2dc8b6a55e957d530e54441551f102796455d35a76dcd721cbba41982"} Jan 04 12:10:59 crc kubenswrapper[5003]: I0104 12:10:59.758369 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e51a292-b57d-4dc8-bca6-65ff67198a27","Type":"ContainerStarted","Data":"31f431ee2e7394251280aa035ee0db15e9d4c57c678ed0553976b2d187220b83"} Jan 04 12:10:59 crc kubenswrapper[5003]: I0104 12:10:59.779402 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.779377259 podStartE2EDuration="3.779377259s" podCreationTimestamp="2026-01-04 12:10:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:10:59.777076089 +0000 UTC m=+1375.250105930" watchObservedRunningTime="2026-01-04 12:10:59.779377259 +0000 UTC m=+1375.252407100" Jan 04 12:11:00 crc kubenswrapper[5003]: I0104 12:11:00.781399 
5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e51a292-b57d-4dc8-bca6-65ff67198a27","Type":"ContainerStarted","Data":"8d0d093df26396876e1cfb66b4aab7d3609fe73cbecd4254a99372c4faa3974f"} Jan 04 12:11:01 crc kubenswrapper[5003]: I0104 12:11:01.914927 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 04 12:11:01 crc kubenswrapper[5003]: I0104 12:11:01.914980 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Jan 04 12:11:01 crc kubenswrapper[5003]: I0104 12:11:01.963618 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 04 12:11:01 crc kubenswrapper[5003]: I0104 12:11:01.979962 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Jan 04 12:11:02 crc kubenswrapper[5003]: I0104 12:11:02.800952 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 04 12:11:02 crc kubenswrapper[5003]: I0104 12:11:02.801420 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Jan 04 12:11:04 crc kubenswrapper[5003]: I0104 12:11:04.828839 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 12:11:04 crc kubenswrapper[5003]: I0104 12:11:04.830430 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 12:11:04 crc kubenswrapper[5003]: I0104 12:11:04.857642 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 04 12:11:04 crc kubenswrapper[5003]: I0104 12:11:04.865444 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Jan 04 12:11:05 crc kubenswrapper[5003]: I0104 12:11:05.840005 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e51a292-b57d-4dc8-bca6-65ff67198a27","Type":"ContainerStarted","Data":"b8af2e75e9fca4f6c6847cef3a558f4d0159866a84bc282abf8b55a56b05d62a"} Jan 04 12:11:05 crc kubenswrapper[5003]: I0104 12:11:05.841613 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 04 12:11:05 crc kubenswrapper[5003]: I0104 12:11:05.841451 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="proxy-httpd" containerID="cri-o://b8af2e75e9fca4f6c6847cef3a558f4d0159866a84bc282abf8b55a56b05d62a" gracePeriod=30 Jan 04 12:11:05 crc kubenswrapper[5003]: I0104 12:11:05.841343 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="ceilometer-central-agent" containerID="cri-o://d18e34024e2cc26a8c19bd692f402ce02356e8651a1d5e2fcd08cebad31487e0" gracePeriod=30 Jan 04 12:11:05 crc kubenswrapper[5003]: I0104 12:11:05.841554 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="ceilometer-notification-agent" containerID="cri-o://31f431ee2e7394251280aa035ee0db15e9d4c57c678ed0553976b2d187220b83" gracePeriod=30 Jan 04 12:11:05 crc kubenswrapper[5003]: I0104 
12:11:05.841600 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="sg-core" containerID="cri-o://8d0d093df26396876e1cfb66b4aab7d3609fe73cbecd4254a99372c4faa3974f" gracePeriod=30 Jan 04 12:11:05 crc kubenswrapper[5003]: I0104 12:11:05.847320 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-7z7gd" event={"ID":"4689f11c-e91e-4045-ae29-5377e6b8ae6e","Type":"ContainerStarted","Data":"ea94fc65feb70c04941c0d5486b524d70238803b24fc9b2c553adec3e7a57ef2"} Jan 04 12:11:05 crc kubenswrapper[5003]: I0104 12:11:05.891239 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.393011047 podStartE2EDuration="9.891200804s" podCreationTimestamp="2026-01-04 12:10:56 +0000 UTC" firstStartedPulling="2026-01-04 12:10:57.7449769 +0000 UTC m=+1373.218006741" lastFinishedPulling="2026-01-04 12:11:05.243166657 +0000 UTC m=+1380.716196498" observedRunningTime="2026-01-04 12:11:05.880624298 +0000 UTC m=+1381.353654139" watchObservedRunningTime="2026-01-04 12:11:05.891200804 +0000 UTC m=+1381.364230685" Jan 04 12:11:05 crc kubenswrapper[5003]: I0104 12:11:05.909298 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-7z7gd" podStartSLOduration=2.446165206 podStartE2EDuration="11.909280837s" podCreationTimestamp="2026-01-04 12:10:54 +0000 UTC" firstStartedPulling="2026-01-04 12:10:55.777662304 +0000 UTC m=+1371.250692145" lastFinishedPulling="2026-01-04 12:11:05.240777935 +0000 UTC m=+1380.713807776" observedRunningTime="2026-01-04 12:11:05.903497295 +0000 UTC m=+1381.376527176" watchObservedRunningTime="2026-01-04 12:11:05.909280837 +0000 UTC m=+1381.382310678" Jan 04 12:11:06 crc kubenswrapper[5003]: I0104 12:11:06.863966 5003 generic.go:334] "Generic (PLEG): container finished" podID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerID="b8af2e75e9fca4f6c6847cef3a558f4d0159866a84bc282abf8b55a56b05d62a" exitCode=0 Jan 04 12:11:06 crc kubenswrapper[5003]: I0104 12:11:06.864504 5003 generic.go:334] "Generic (PLEG): container finished" podID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerID="8d0d093df26396876e1cfb66b4aab7d3609fe73cbecd4254a99372c4faa3974f" exitCode=2 Jan 04 12:11:06 crc kubenswrapper[5003]: I0104 12:11:06.864526 5003 generic.go:334] "Generic (PLEG): container finished" podID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerID="31f431ee2e7394251280aa035ee0db15e9d4c57c678ed0553976b2d187220b83" exitCode=0 Jan 04 12:11:06 crc kubenswrapper[5003]: I0104 12:11:06.864165 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e51a292-b57d-4dc8-bca6-65ff67198a27","Type":"ContainerDied","Data":"b8af2e75e9fca4f6c6847cef3a558f4d0159866a84bc282abf8b55a56b05d62a"} Jan 04 12:11:06 crc kubenswrapper[5003]: I0104 12:11:06.865266 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e51a292-b57d-4dc8-bca6-65ff67198a27","Type":"ContainerDied","Data":"8d0d093df26396876e1cfb66b4aab7d3609fe73cbecd4254a99372c4faa3974f"} Jan 04 12:11:06 crc kubenswrapper[5003]: I0104 12:11:06.865283 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e51a292-b57d-4dc8-bca6-65ff67198a27","Type":"ContainerDied","Data":"31f431ee2e7394251280aa035ee0db15e9d4c57c678ed0553976b2d187220b83"} Jan 04 12:11:07 crc kubenswrapper[5003]: 
I0104 12:11:07.162035 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 04 12:11:07 crc kubenswrapper[5003]: I0104 12:11:07.162439 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Jan 04 12:11:07 crc kubenswrapper[5003]: I0104 12:11:07.208194 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 04 12:11:07 crc kubenswrapper[5003]: I0104 12:11:07.216363 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Jan 04 12:11:07 crc kubenswrapper[5003]: I0104 12:11:07.886580 5003 generic.go:334] "Generic (PLEG): container finished" podID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerID="d18e34024e2cc26a8c19bd692f402ce02356e8651a1d5e2fcd08cebad31487e0" exitCode=0 Jan 04 12:11:07 crc kubenswrapper[5003]: I0104 12:11:07.887185 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e51a292-b57d-4dc8-bca6-65ff67198a27","Type":"ContainerDied","Data":"d18e34024e2cc26a8c19bd692f402ce02356e8651a1d5e2fcd08cebad31487e0"} Jan 04 12:11:07 crc kubenswrapper[5003]: I0104 12:11:07.887796 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 04 12:11:07 crc kubenswrapper[5003]: I0104 12:11:07.887855 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.146130 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.221328 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-config-data\") pod \"8e51a292-b57d-4dc8-bca6-65ff67198a27\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.221448 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-scripts\") pod \"8e51a292-b57d-4dc8-bca6-65ff67198a27\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.221502 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-log-httpd\") pod \"8e51a292-b57d-4dc8-bca6-65ff67198a27\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.221549 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-sg-core-conf-yaml\") pod \"8e51a292-b57d-4dc8-bca6-65ff67198a27\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.221587 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-run-httpd\") pod \"8e51a292-b57d-4dc8-bca6-65ff67198a27\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 
12:11:08.221606 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-combined-ca-bundle\") pod \"8e51a292-b57d-4dc8-bca6-65ff67198a27\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.221633 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8r86l\" (UniqueName: \"kubernetes.io/projected/8e51a292-b57d-4dc8-bca6-65ff67198a27-kube-api-access-8r86l\") pod \"8e51a292-b57d-4dc8-bca6-65ff67198a27\" (UID: \"8e51a292-b57d-4dc8-bca6-65ff67198a27\") " Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.222739 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8e51a292-b57d-4dc8-bca6-65ff67198a27" (UID: "8e51a292-b57d-4dc8-bca6-65ff67198a27"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.222960 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8e51a292-b57d-4dc8-bca6-65ff67198a27" (UID: "8e51a292-b57d-4dc8-bca6-65ff67198a27"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.232105 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e51a292-b57d-4dc8-bca6-65ff67198a27-kube-api-access-8r86l" (OuterVolumeSpecName: "kube-api-access-8r86l") pod "8e51a292-b57d-4dc8-bca6-65ff67198a27" (UID: "8e51a292-b57d-4dc8-bca6-65ff67198a27"). InnerVolumeSpecName "kube-api-access-8r86l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.237583 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-scripts" (OuterVolumeSpecName: "scripts") pod "8e51a292-b57d-4dc8-bca6-65ff67198a27" (UID: "8e51a292-b57d-4dc8-bca6-65ff67198a27"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.266064 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8e51a292-b57d-4dc8-bca6-65ff67198a27" (UID: "8e51a292-b57d-4dc8-bca6-65ff67198a27"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.324074 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.324417 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.324504 5003 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.324588 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e51a292-b57d-4dc8-bca6-65ff67198a27-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.324661 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8r86l\" (UniqueName: \"kubernetes.io/projected/8e51a292-b57d-4dc8-bca6-65ff67198a27-kube-api-access-8r86l\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.332391 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8e51a292-b57d-4dc8-bca6-65ff67198a27" (UID: "8e51a292-b57d-4dc8-bca6-65ff67198a27"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.349108 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-config-data" (OuterVolumeSpecName: "config-data") pod "8e51a292-b57d-4dc8-bca6-65ff67198a27" (UID: "8e51a292-b57d-4dc8-bca6-65ff67198a27"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.427606 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.427690 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e51a292-b57d-4dc8-bca6-65ff67198a27-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.899888 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.900068 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e51a292-b57d-4dc8-bca6-65ff67198a27","Type":"ContainerDied","Data":"50089b424f4ec1dec86a03457813d4d56c8e800fd2b0810a8738fe0f8955cdd5"} Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.901084 5003 scope.go:117] "RemoveContainer" containerID="b8af2e75e9fca4f6c6847cef3a558f4d0159866a84bc282abf8b55a56b05d62a" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.932102 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.958710 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.970187 5003 scope.go:117] "RemoveContainer" containerID="8d0d093df26396876e1cfb66b4aab7d3609fe73cbecd4254a99372c4faa3974f" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.978627 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:11:08 crc kubenswrapper[5003]: E0104 12:11:08.979124 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="ceilometer-notification-agent" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.979141 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="ceilometer-notification-agent" Jan 04 12:11:08 crc kubenswrapper[5003]: E0104 12:11:08.979160 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="proxy-httpd" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.979171 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="proxy-httpd" Jan 04 12:11:08 crc kubenswrapper[5003]: E0104 12:11:08.979183 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="ceilometer-central-agent" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.979191 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="ceilometer-central-agent" Jan 04 12:11:08 crc kubenswrapper[5003]: E0104 12:11:08.979211 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="sg-core" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.979218 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="sg-core" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.979417 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="ceilometer-notification-agent" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.979433 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="proxy-httpd" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.979454 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" containerName="sg-core" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.979470 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" 
containerName="ceilometer-central-agent" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.983164 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.987186 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.987542 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 04 12:11:08 crc kubenswrapper[5003]: I0104 12:11:08.995726 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.048533 5003 scope.go:117] "RemoveContainer" containerID="31f431ee2e7394251280aa035ee0db15e9d4c57c678ed0553976b2d187220b83" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.094613 5003 scope.go:117] "RemoveContainer" containerID="d18e34024e2cc26a8c19bd692f402ce02356e8651a1d5e2fcd08cebad31487e0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.143626 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.143798 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-config-data\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.143941 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swhs7\" (UniqueName: \"kubernetes.io/projected/a279a318-6850-44fe-9ff0-f640e3e8870b-kube-api-access-swhs7\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.143982 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.144221 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-scripts\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.144324 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-run-httpd\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.144470 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-log-httpd\") 
pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.246645 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.246944 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-config-data\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.247125 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swhs7\" (UniqueName: \"kubernetes.io/projected/a279a318-6850-44fe-9ff0-f640e3e8870b-kube-api-access-swhs7\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.247252 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.247424 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-scripts\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.247563 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-run-httpd\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.247782 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-log-httpd\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.248137 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-run-httpd\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.251321 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-log-httpd\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.252357 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.252949 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-scripts\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.252948 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-config-data\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.253094 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.268537 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swhs7\" (UniqueName: \"kubernetes.io/projected/a279a318-6850-44fe-9ff0-f640e3e8870b-kube-api-access-swhs7\") pod \"ceilometer-0\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") " pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.330984 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.418593 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.419062 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.419108 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.419854 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"69783d40bb0d702bd7a771a35f8a0c04b3ee78e8c80ee725ec241cde3249b382"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.419904 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://69783d40bb0d702bd7a771a35f8a0c04b3ee78e8c80ee725ec241cde3249b382" gracePeriod=600 Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.875474 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/ceilometer-0"] Jan 04 12:11:09 crc kubenswrapper[5003]: W0104 12:11:09.877918 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda279a318_6850_44fe_9ff0_f640e3e8870b.slice/crio-f0fbde16b426a0f2fd1c0f22b22bbdb72c59af935b0232858edd0386c9fdac45 WatchSource:0}: Error finding container f0fbde16b426a0f2fd1c0f22b22bbdb72c59af935b0232858edd0386c9fdac45: Status 404 returned error can't find the container with id f0fbde16b426a0f2fd1c0f22b22bbdb72c59af935b0232858edd0386c9fdac45 Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.912290 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a279a318-6850-44fe-9ff0-f640e3e8870b","Type":"ContainerStarted","Data":"f0fbde16b426a0f2fd1c0f22b22bbdb72c59af935b0232858edd0386c9fdac45"} Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.921166 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="69783d40bb0d702bd7a771a35f8a0c04b3ee78e8c80ee725ec241cde3249b382" exitCode=0 Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.921234 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"69783d40bb0d702bd7a771a35f8a0c04b3ee78e8c80ee725ec241cde3249b382"} Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.921271 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"} Jan 04 12:11:09 crc kubenswrapper[5003]: I0104 12:11:09.921293 5003 scope.go:117] "RemoveContainer" containerID="8805b4e8959ecfe4ed6d4f63f07630a7e965c7249b0c281ceae8ee8943118856" Jan 04 12:11:10 crc kubenswrapper[5003]: I0104 12:11:10.036150 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 04 12:11:10 crc kubenswrapper[5003]: I0104 12:11:10.036992 5003 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 04 12:11:10 crc kubenswrapper[5003]: I0104 12:11:10.190194 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Jan 04 12:11:10 crc kubenswrapper[5003]: I0104 12:11:10.818325 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e51a292-b57d-4dc8-bca6-65ff67198a27" path="/var/lib/kubelet/pods/8e51a292-b57d-4dc8-bca6-65ff67198a27/volumes" Jan 04 12:11:11 crc kubenswrapper[5003]: I0104 12:11:11.116411 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:11:11 crc kubenswrapper[5003]: I0104 12:11:11.954380 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a279a318-6850-44fe-9ff0-f640e3e8870b","Type":"ContainerStarted","Data":"8886e0536a13c8425258493d8c22c414d683cde3a556de7b51afee10b64e3337"} Jan 04 12:11:11 crc kubenswrapper[5003]: I0104 12:11:11.954885 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a279a318-6850-44fe-9ff0-f640e3e8870b","Type":"ContainerStarted","Data":"9382ae95275d5d300c14fcc11440971c1c5a41567731dfa2759ead9813a306cf"} Jan 04 12:11:12 crc kubenswrapper[5003]: I0104 12:11:12.969752 5003 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a279a318-6850-44fe-9ff0-f640e3e8870b","Type":"ContainerStarted","Data":"14f9245badf6fb0e666d131fb16aacc80d191b676102d7d058c199c3934c3e62"} Jan 04 12:11:13 crc kubenswrapper[5003]: I0104 12:11:13.983984 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a279a318-6850-44fe-9ff0-f640e3e8870b","Type":"ContainerStarted","Data":"c5e240e180d37f3b1229501e8405dc4f9e8dd86dfea3a3e1ccb3f308ae7a92e1"} Jan 04 12:11:13 crc kubenswrapper[5003]: I0104 12:11:13.985768 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="ceilometer-central-agent" containerID="cri-o://9382ae95275d5d300c14fcc11440971c1c5a41567731dfa2759ead9813a306cf" gracePeriod=30 Jan 04 12:11:13 crc kubenswrapper[5003]: I0104 12:11:13.985852 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 04 12:11:13 crc kubenswrapper[5003]: I0104 12:11:13.985792 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="proxy-httpd" containerID="cri-o://c5e240e180d37f3b1229501e8405dc4f9e8dd86dfea3a3e1ccb3f308ae7a92e1" gracePeriod=30 Jan 04 12:11:13 crc kubenswrapper[5003]: I0104 12:11:13.985936 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="ceilometer-notification-agent" containerID="cri-o://8886e0536a13c8425258493d8c22c414d683cde3a556de7b51afee10b64e3337" gracePeriod=30 Jan 04 12:11:13 crc kubenswrapper[5003]: I0104 12:11:13.986147 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="sg-core" containerID="cri-o://14f9245badf6fb0e666d131fb16aacc80d191b676102d7d058c199c3934c3e62" gracePeriod=30 Jan 04 12:11:14 crc kubenswrapper[5003]: I0104 12:11:14.026006 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.283443765 podStartE2EDuration="6.025984349s" podCreationTimestamp="2026-01-04 12:11:08 +0000 UTC" firstStartedPulling="2026-01-04 12:11:09.880597618 +0000 UTC m=+1385.353627459" lastFinishedPulling="2026-01-04 12:11:13.623138202 +0000 UTC m=+1389.096168043" observedRunningTime="2026-01-04 12:11:14.020519987 +0000 UTC m=+1389.493549818" watchObservedRunningTime="2026-01-04 12:11:14.025984349 +0000 UTC m=+1389.499014190" Jan 04 12:11:14 crc kubenswrapper[5003]: I0104 12:11:14.995955 5003 generic.go:334] "Generic (PLEG): container finished" podID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerID="c5e240e180d37f3b1229501e8405dc4f9e8dd86dfea3a3e1ccb3f308ae7a92e1" exitCode=0 Jan 04 12:11:14 crc kubenswrapper[5003]: I0104 12:11:14.996707 5003 generic.go:334] "Generic (PLEG): container finished" podID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerID="14f9245badf6fb0e666d131fb16aacc80d191b676102d7d058c199c3934c3e62" exitCode=2 Jan 04 12:11:14 crc kubenswrapper[5003]: I0104 12:11:14.996717 5003 generic.go:334] "Generic (PLEG): container finished" podID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerID="8886e0536a13c8425258493d8c22c414d683cde3a556de7b51afee10b64e3337" exitCode=0 Jan 04 12:11:14 crc kubenswrapper[5003]: I0104 12:11:14.996079 5003 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a279a318-6850-44fe-9ff0-f640e3e8870b","Type":"ContainerDied","Data":"c5e240e180d37f3b1229501e8405dc4f9e8dd86dfea3a3e1ccb3f308ae7a92e1"} Jan 04 12:11:14 crc kubenswrapper[5003]: I0104 12:11:14.996756 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a279a318-6850-44fe-9ff0-f640e3e8870b","Type":"ContainerDied","Data":"14f9245badf6fb0e666d131fb16aacc80d191b676102d7d058c199c3934c3e62"} Jan 04 12:11:14 crc kubenswrapper[5003]: I0104 12:11:14.996774 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a279a318-6850-44fe-9ff0-f640e3e8870b","Type":"ContainerDied","Data":"8886e0536a13c8425258493d8c22c414d683cde3a556de7b51afee10b64e3337"} Jan 04 12:11:17 crc kubenswrapper[5003]: I0104 12:11:17.018868 5003 generic.go:334] "Generic (PLEG): container finished" podID="4689f11c-e91e-4045-ae29-5377e6b8ae6e" containerID="ea94fc65feb70c04941c0d5486b524d70238803b24fc9b2c553adec3e7a57ef2" exitCode=0 Jan 04 12:11:17 crc kubenswrapper[5003]: I0104 12:11:17.018932 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-7z7gd" event={"ID":"4689f11c-e91e-4045-ae29-5377e6b8ae6e","Type":"ContainerDied","Data":"ea94fc65feb70c04941c0d5486b524d70238803b24fc9b2c553adec3e7a57ef2"} Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.506000 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-7z7gd" Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.651428 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-config-data\") pod \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.651645 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrzvs\" (UniqueName: \"kubernetes.io/projected/4689f11c-e91e-4045-ae29-5377e6b8ae6e-kube-api-access-jrzvs\") pod \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.651851 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-scripts\") pod \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.651908 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-combined-ca-bundle\") pod \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\" (UID: \"4689f11c-e91e-4045-ae29-5377e6b8ae6e\") " Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.660454 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4689f11c-e91e-4045-ae29-5377e6b8ae6e-kube-api-access-jrzvs" (OuterVolumeSpecName: "kube-api-access-jrzvs") pod "4689f11c-e91e-4045-ae29-5377e6b8ae6e" (UID: "4689f11c-e91e-4045-ae29-5377e6b8ae6e"). InnerVolumeSpecName "kube-api-access-jrzvs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.661146 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-scripts" (OuterVolumeSpecName: "scripts") pod "4689f11c-e91e-4045-ae29-5377e6b8ae6e" (UID: "4689f11c-e91e-4045-ae29-5377e6b8ae6e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.690621 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4689f11c-e91e-4045-ae29-5377e6b8ae6e" (UID: "4689f11c-e91e-4045-ae29-5377e6b8ae6e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.699943 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-config-data" (OuterVolumeSpecName: "config-data") pod "4689f11c-e91e-4045-ae29-5377e6b8ae6e" (UID: "4689f11c-e91e-4045-ae29-5377e6b8ae6e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.755163 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrzvs\" (UniqueName: \"kubernetes.io/projected/4689f11c-e91e-4045-ae29-5377e6b8ae6e-kube-api-access-jrzvs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.755204 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.755216 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:18 crc kubenswrapper[5003]: I0104 12:11:18.755227 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4689f11c-e91e-4045-ae29-5377e6b8ae6e-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.047367 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-7z7gd" event={"ID":"4689f11c-e91e-4045-ae29-5377e6b8ae6e","Type":"ContainerDied","Data":"bd637d0bcc4ae556d6d2470c852f53dd91c6fc516b6506c2b3938b8976598afb"} Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.047417 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd637d0bcc4ae556d6d2470c852f53dd91c6fc516b6506c2b3938b8976598afb" Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.047480 5003 util.go:48] "No ready sandbox for pod can be found. 
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.246737 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 04 12:11:19 crc kubenswrapper[5003]: E0104 12:11:19.247443 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4689f11c-e91e-4045-ae29-5377e6b8ae6e" containerName="nova-cell0-conductor-db-sync"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.247536 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4689f11c-e91e-4045-ae29-5377e6b8ae6e" containerName="nova-cell0-conductor-db-sync"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.260642 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4689f11c-e91e-4045-ae29-5377e6b8ae6e" containerName="nova-cell0-conductor-db-sync"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.262247 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.271108 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-lh872"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.271314 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.298153 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.372523 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.372810 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.372941 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjm4f\" (UniqueName: \"kubernetes.io/projected/28122a6a-8b54-4ff3-9092-a1f7439a35cf-kube-api-access-rjm4f\") pod \"nova-cell0-conductor-0\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.475301 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.475379 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.475444 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjm4f\" (UniqueName: \"kubernetes.io/projected/28122a6a-8b54-4ff3-9092-a1f7439a35cf-kube-api-access-rjm4f\") pod \"nova-cell0-conductor-0\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.486941 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.487412 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.498836 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjm4f\" (UniqueName: \"kubernetes.io/projected/28122a6a-8b54-4ff3-9092-a1f7439a35cf-kube-api-access-rjm4f\") pod \"nova-cell0-conductor-0\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.644277 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:19 crc kubenswrapper[5003]: I0104 12:11:19.925661 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.060275 5003 generic.go:334] "Generic (PLEG): container finished" podID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerID="9382ae95275d5d300c14fcc11440971c1c5a41567731dfa2759ead9813a306cf" exitCode=0
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.060370 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a279a318-6850-44fe-9ff0-f640e3e8870b","Type":"ContainerDied","Data":"9382ae95275d5d300c14fcc11440971c1c5a41567731dfa2759ead9813a306cf"}
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.062495 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"28122a6a-8b54-4ff3-9092-a1f7439a35cf","Type":"ContainerStarted","Data":"9e8de1c7eb573065126ac170b63253962cc73824f45c5ce5383a20fee3a33e6f"}
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.139900 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rgh64"]
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.142551 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rgh64"
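Container state changes surface twice in this stream: generic.go:334 "Generic (PLEG): container finished" carries the exit code, and kubelet.go:2453 "SyncLoop (PLEG): event for pod" carries the ContainerStarted/ContainerDied transition with the container ID in the event's Data field. A sketch reconstructing a per-container timeline from the SyncLoop events, assuming kubelet.log is a plain-text copy of this log (the event={...} blob happens to be valid JSON):

import json
import re

# Capture "<Mon DD HH:MM:SS> ... pod=... event={...}" from each PLEG line.
evt = re.compile(
    r'^(?P<ts>\w+ \d+ [\d:]+) .*"SyncLoop \(PLEG\): event for pod" '
    r'pod="(?P<pod>[^"]+)" event=(?P<blob>\{.*\})'
)

timeline = {}
with open("kubelet.log") as f:
    for line in f:
        m = evt.search(line)
        if not m:
            continue
        e = json.loads(m.group("blob"))
        key = (m.group("pod"), e["Data"][:13])  # pod name, short container ID
        timeline.setdefault(key, []).append((m.group("ts"), e["Type"]))

for (pod, cid), events in sorted(timeline.items()):
    print(pod, cid, "->", events)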
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.152618 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rgh64"]
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.293765 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-catalog-content\") pod \"redhat-operators-rgh64\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.294324 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nj9q\" (UniqueName: \"kubernetes.io/projected/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-kube-api-access-9nj9q\") pod \"redhat-operators-rgh64\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.294468 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-utilities\") pod \"redhat-operators-rgh64\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.397082 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-catalog-content\") pod \"redhat-operators-rgh64\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.398255 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nj9q\" (UniqueName: \"kubernetes.io/projected/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-kube-api-access-9nj9q\") pod \"redhat-operators-rgh64\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.398411 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-utilities\") pod \"redhat-operators-rgh64\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.397698 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-catalog-content\") pod \"redhat-operators-rgh64\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.398989 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-utilities\") pod \"redhat-operators-rgh64\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.419245 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nj9q\" (UniqueName: \"kubernetes.io/projected/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-kube-api-access-9nj9q\") pod \"redhat-operators-rgh64\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.474706 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.851647 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.909941 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-scripts\") pod \"a279a318-6850-44fe-9ff0-f640e3e8870b\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") "
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.910188 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-config-data\") pod \"a279a318-6850-44fe-9ff0-f640e3e8870b\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") "
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.910235 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-combined-ca-bundle\") pod \"a279a318-6850-44fe-9ff0-f640e3e8870b\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") "
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.910292 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-run-httpd\") pod \"a279a318-6850-44fe-9ff0-f640e3e8870b\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") "
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.910374 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swhs7\" (UniqueName: \"kubernetes.io/projected/a279a318-6850-44fe-9ff0-f640e3e8870b-kube-api-access-swhs7\") pod \"a279a318-6850-44fe-9ff0-f640e3e8870b\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") "
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.910413 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-sg-core-conf-yaml\") pod \"a279a318-6850-44fe-9ff0-f640e3e8870b\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") "
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.910473 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-log-httpd\") pod \"a279a318-6850-44fe-9ff0-f640e3e8870b\" (UID: \"a279a318-6850-44fe-9ff0-f640e3e8870b\") "
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.915665 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-scripts" (OuterVolumeSpecName: "scripts") pod "a279a318-6850-44fe-9ff0-f640e3e8870b" (UID: "a279a318-6850-44fe-9ff0-f640e3e8870b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.916626 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a279a318-6850-44fe-9ff0-f640e3e8870b" (UID: "a279a318-6850-44fe-9ff0-f640e3e8870b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.916865 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a279a318-6850-44fe-9ff0-f640e3e8870b" (UID: "a279a318-6850-44fe-9ff0-f640e3e8870b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.917214 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a279a318-6850-44fe-9ff0-f640e3e8870b-kube-api-access-swhs7" (OuterVolumeSpecName: "kube-api-access-swhs7") pod "a279a318-6850-44fe-9ff0-f640e3e8870b" (UID: "a279a318-6850-44fe-9ff0-f640e3e8870b"). InnerVolumeSpecName "kube-api-access-swhs7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:11:20 crc kubenswrapper[5003]: I0104 12:11:20.952791 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a279a318-6850-44fe-9ff0-f640e3e8870b" (UID: "a279a318-6850-44fe-9ff0-f640e3e8870b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.007407 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a279a318-6850-44fe-9ff0-f640e3e8870b" (UID: "a279a318-6850-44fe-9ff0-f640e3e8870b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.012344 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.012380 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.012390 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.012404 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a279a318-6850-44fe-9ff0-f640e3e8870b-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.012413 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swhs7\" (UniqueName: \"kubernetes.io/projected/a279a318-6850-44fe-9ff0-f640e3e8870b-kube-api-access-swhs7\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.012425 5003 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:21 crc kubenswrapper[5003]: W0104 12:11:21.042772 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d0624ef_34a3_4ea3_b7ac_1c8415a3080a.slice/crio-25867ccd846c6ed87392656ab51468805d768e9de309609dce08c61364d2d834 WatchSource:0}: Error finding container 25867ccd846c6ed87392656ab51468805d768e9de309609dce08c61364d2d834: Status 404 returned error can't find the container with id 25867ccd846c6ed87392656ab51468805d768e9de309609dce08c61364d2d834
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.044589 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-config-data" (OuterVolumeSpecName: "config-data") pod "a279a318-6850-44fe-9ff0-f640e3e8870b" (UID: "a279a318-6850-44fe-9ff0-f640e3e8870b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.045924 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rgh64"]
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.088264 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.088253 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a279a318-6850-44fe-9ff0-f640e3e8870b","Type":"ContainerDied","Data":"f0fbde16b426a0f2fd1c0f22b22bbdb72c59af935b0232858edd0386c9fdac45"}
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.088429 5003 scope.go:117] "RemoveContainer" containerID="c5e240e180d37f3b1229501e8405dc4f9e8dd86dfea3a3e1ccb3f308ae7a92e1"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.093147 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"28122a6a-8b54-4ff3-9092-a1f7439a35cf","Type":"ContainerStarted","Data":"716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4"}
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.093836 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.095303 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgh64" event={"ID":"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a","Type":"ContainerStarted","Data":"25867ccd846c6ed87392656ab51468805d768e9de309609dce08c61364d2d834"}
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.114933 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a279a318-6850-44fe-9ff0-f640e3e8870b-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.126383 5003 scope.go:117] "RemoveContainer" containerID="14f9245badf6fb0e666d131fb16aacc80d191b676102d7d058c199c3934c3e62"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.136856 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.136832334 podStartE2EDuration="2.136832334s" podCreationTimestamp="2026-01-04 12:11:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:21.117754136 +0000 UTC m=+1396.590783977" watchObservedRunningTime="2026-01-04 12:11:21.136832334 +0000 UTC m=+1396.609862175"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.143415 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.173518 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.180550 5003 scope.go:117] "RemoveContainer" containerID="8886e0536a13c8425258493d8c22c414d683cde3a556de7b51afee10b64e3337"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.190209 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:11:21 crc kubenswrapper[5003]: E0104 12:11:21.190809 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="proxy-httpd"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.190830 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="proxy-httpd"
Jan 04 12:11:21 crc kubenswrapper[5003]: E0104 12:11:21.190856 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="ceilometer-notification-agent"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.190866 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="ceilometer-notification-agent"
Jan 04 12:11:21 crc kubenswrapper[5003]: E0104 12:11:21.190893 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="sg-core"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.190902 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="sg-core"
Jan 04 12:11:21 crc kubenswrapper[5003]: E0104 12:11:21.190924 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="ceilometer-central-agent"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.190933 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="ceilometer-central-agent"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.191205 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="ceilometer-notification-agent"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.191231 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="sg-core"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.191249 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="proxy-httpd"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.191265 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" containerName="ceilometer-central-agent"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.193601 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.197301 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.197556 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.198915 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.239090 5003 scope.go:117] "RemoveContainer" containerID="9382ae95275d5d300c14fcc11440971c1c5a41567731dfa2759ead9813a306cf"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.318118 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.318197 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.318397 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfn4j\" (UniqueName: \"kubernetes.io/projected/c05451f7-8c2b-4728-8e6d-d9956df16ebd-kube-api-access-zfn4j\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.318521 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-scripts\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.318552 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-log-httpd\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.318782 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-run-httpd\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.318859 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-config-data\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.420465 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-config-data\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.420527 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.420576 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.420640 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfn4j\" (UniqueName: \"kubernetes.io/projected/c05451f7-8c2b-4728-8e6d-d9956df16ebd-kube-api-access-zfn4j\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.420689 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-scripts\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.420711 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-log-httpd\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.420809 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-run-httpd\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.421299 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-run-httpd\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.421704 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-log-httpd\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.425879 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.426843 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-scripts\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.426922 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-config-data\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.427649 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.439641 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfn4j\" (UniqueName: \"kubernetes.io/projected/c05451f7-8c2b-4728-8e6d-d9956df16ebd-kube-api-access-zfn4j\") pod \"ceilometer-0\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " pod="openstack/ceilometer-0"
Jan 04 12:11:21 crc kubenswrapper[5003]: I0104 12:11:21.528681 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 04 12:11:22 crc kubenswrapper[5003]: I0104 12:11:22.035946 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:11:22 crc kubenswrapper[5003]: I0104 12:11:22.114070 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c05451f7-8c2b-4728-8e6d-d9956df16ebd","Type":"ContainerStarted","Data":"796de35bb84521a8afb378e3c32ef03571092717776b626d734f48b77949cba2"}
Jan 04 12:11:22 crc kubenswrapper[5003]: I0104 12:11:22.115938 5003 generic.go:334] "Generic (PLEG): container finished" podID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerID="8948d796b917674a577983e425464ae342b79f7ab015a92cb9b0d8279ae86853" exitCode=0
Jan 04 12:11:22 crc kubenswrapper[5003]: I0104 12:11:22.116250 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgh64" event={"ID":"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a","Type":"ContainerDied","Data":"8948d796b917674a577983e425464ae342b79f7ab015a92cb9b0d8279ae86853"}
Jan 04 12:11:22 crc kubenswrapper[5003]: I0104 12:11:22.816336 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a279a318-6850-44fe-9ff0-f640e3e8870b" path="/var/lib/kubelet/pods/a279a318-6850-44fe-9ff0-f640e3e8870b/volumes"
Jan 04 12:11:23 crc kubenswrapper[5003]: I0104 12:11:23.136170 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c05451f7-8c2b-4728-8e6d-d9956df16ebd","Type":"ContainerStarted","Data":"19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035"}
Jan 04 12:11:24 crc kubenswrapper[5003]: I0104 12:11:24.146631 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c05451f7-8c2b-4728-8e6d-d9956df16ebd","Type":"ContainerStarted","Data":"5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a"}
Jan 04 12:11:24 crc kubenswrapper[5003]: I0104 12:11:24.148681 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgh64" event={"ID":"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a","Type":"ContainerStarted","Data":"97470e8d3ac5ad6ac580afac5a5c13df77ee58798c55318d8c55408595c0ff4b"}
Jan 04 12:11:25 crc kubenswrapper[5003]: I0104 12:11:25.179729 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c05451f7-8c2b-4728-8e6d-d9956df16ebd","Type":"ContainerStarted","Data":"b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5"}
Jan 04 12:11:26 crc kubenswrapper[5003]: I0104 12:11:26.193917 5003 generic.go:334] "Generic (PLEG): container finished" podID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerID="97470e8d3ac5ad6ac580afac5a5c13df77ee58798c55318d8c55408595c0ff4b" exitCode=0
Jan 04 12:11:26 crc kubenswrapper[5003]: I0104 12:11:26.194045 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgh64" event={"ID":"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a","Type":"ContainerDied","Data":"97470e8d3ac5ad6ac580afac5a5c13df77ee58798c55318d8c55408595c0ff4b"}
Jan 04 12:11:28 crc kubenswrapper[5003]: I0104 12:11:28.225130 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgh64" event={"ID":"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a","Type":"ContainerStarted","Data":"f192bada85465290612f3b88d09a2afbf10bc142ece6aecc537bae40f6c9c1a3"}
Jan 04 12:11:28 crc kubenswrapper[5003]: I0104 12:11:28.227693 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c05451f7-8c2b-4728-8e6d-d9956df16ebd","Type":"ContainerStarted","Data":"5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9"}
Jan 04 12:11:28 crc kubenswrapper[5003]: I0104 12:11:28.228048 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 04 12:11:28 crc kubenswrapper[5003]: I0104 12:11:28.255423 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rgh64" podStartSLOduration=3.288411656 podStartE2EDuration="8.25539849s" podCreationTimestamp="2026-01-04 12:11:20 +0000 UTC" firstStartedPulling="2026-01-04 12:11:22.146847212 +0000 UTC m=+1397.619877053" lastFinishedPulling="2026-01-04 12:11:27.113834046 +0000 UTC m=+1402.586863887" observedRunningTime="2026-01-04 12:11:28.244460164 +0000 UTC m=+1403.717490015" watchObservedRunningTime="2026-01-04 12:11:28.25539849 +0000 UTC m=+1403.728428331"
Jan 04 12:11:28 crc kubenswrapper[5003]: I0104 12:11:28.275505 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.358688753 podStartE2EDuration="7.275479445s" podCreationTimestamp="2026-01-04 12:11:21 +0000 UTC" firstStartedPulling="2026-01-04 12:11:22.039329132 +0000 UTC m=+1397.512358973" lastFinishedPulling="2026-01-04 12:11:26.956119824 +0000 UTC m=+1402.429149665" observedRunningTime="2026-01-04 12:11:28.267571488 +0000 UTC m=+1403.740601329" watchObservedRunningTime="2026-01-04 12:11:28.275479445 +0000 UTC m=+1403.748509286"
Jan 04 12:11:29 crc kubenswrapper[5003]: I0104 12:11:29.676190 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.192359 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-jmh76"]
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.194721 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.204653 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.205420 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.236813 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-jmh76"]
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.326681 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvktj\" (UniqueName: \"kubernetes.io/projected/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-kube-api-access-dvktj\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.326807 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-config-data\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.326841 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-scripts\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.326879 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.378348 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.379757 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.391000 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.424286 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.434246 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg5gg\" (UniqueName: \"kubernetes.io/projected/3e8376ba-e98a-4912-abde-994448dafc7e-kube-api-access-vg5gg\") pod \"nova-scheduler-0\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.434399 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-config-data\") pod \"nova-scheduler-0\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.434486 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvktj\" (UniqueName: \"kubernetes.io/projected/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-kube-api-access-dvktj\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.434638 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-config-data\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.434706 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-scripts\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.434736 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.434798 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.442424 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-scripts\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.450790 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-config-data\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.463929 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvktj\" (UniqueName: \"kubernetes.io/projected/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-kube-api-access-dvktj\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.470180 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.472838 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.477872 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.480967 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rgh64"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.490429 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.495627 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.503446 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-jmh76\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " pod="openstack/nova-cell0-cell-mapping-jmh76"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.533615 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-jmh76"
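The "SyncLoop (probe)" entries above record probe status transitions: an empty status="" means the result is not yet established, while "unhealthy" and "ready" are concrete outcomes (here the redhat-operators-rgh64 startup probe reports unhealthy while its readiness probe is still unset). A sketch tabulating these transitions per pod and probe type, assuming kubelet.log is a plain-text copy of this log:

import re
from collections import Counter

# Tally probe transitions: (pod, probe kind, reported status) -> count.
probe = re.compile(r'"SyncLoop \(probe\)" probe="(\w+)" status="(\w*)" pod="([^"]+)"')

counts = Counter()
with open("kubelet.log") as f:
    for line in f:
        m = probe.search(line)
        if m:
            counts[(m.group(3), m.group(1), m.group(2) or "<unset>")] += 1

for (pod, kind, status), n in sorted(counts.items()):
    print(f"{pod:55s} {kind:10s} {status:10s} x{n}")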
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.536525 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8aded6ca-c07f-4915-9953-d774d9a7de8a-logs\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.536603 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v85s7\" (UniqueName: \"kubernetes.io/projected/8aded6ca-c07f-4915-9953-d774d9a7de8a-kube-api-access-v85s7\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.536698 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.536781 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.537834 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg5gg\" (UniqueName: \"kubernetes.io/projected/3e8376ba-e98a-4912-abde-994448dafc7e-kube-api-access-vg5gg\") pod \"nova-scheduler-0\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.537995 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-config-data\") pod \"nova-scheduler-0\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.538139 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-config-data\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.541566 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.588607 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-config-data\") pod \"nova-scheduler-0\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.612977 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg5gg\" (UniqueName: \"kubernetes.io/projected/3e8376ba-e98a-4912-abde-994448dafc7e-kube-api-access-vg5gg\") pod \"nova-scheduler-0\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.643115 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-config-data\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.643189 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8aded6ca-c07f-4915-9953-d774d9a7de8a-logs\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.643213 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v85s7\" (UniqueName: \"kubernetes.io/projected/8aded6ca-c07f-4915-9953-d774d9a7de8a-kube-api-access-v85s7\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.643259 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.645501 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8aded6ca-c07f-4915-9953-d774d9a7de8a-logs\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.672682 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.674390 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-config-data\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.692345 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.693510 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v85s7\" (UniqueName: \"kubernetes.io/projected/8aded6ca-c07f-4915-9953-d774d9a7de8a-kube-api-access-v85s7\") pod \"nova-api-0\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.695461 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.705419 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.706612 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.717439 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.775280 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.854757 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tq8p2\" (UniqueName: \"kubernetes.io/projected/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-kube-api-access-tq8p2\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.854832 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-config-data\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.854879 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.854929 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-logs\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.871453 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.883617 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.899472 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.930665 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-wg2m4"]
Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.932310 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4"
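Within this one second the kubelet receives "SyncLoop ADD" events for six openstack pods (cell-mapping, scheduler, api, metadata, novncproxy, dnsmasq), each immediately followed by a "No sandbox for pod can be found" record as sandbox creation begins. A sketch listing pods in the order the kubelet first saw them, assuming kubelet.log is a plain-text copy of this log:

import re

# Extract (timestamp, pod) from each "SyncLoop ADD" record, first occurrence only.
add = re.compile(r'([\d:]{8}\.\d+).*"SyncLoop ADD" source="api" pods=\["([^"]+)"\]')

seen = []
with open("kubelet.log") as f:
    for line in f:
        m = add.search(line)
        if m and m.group(2) not in (p for _, p in seen):
            seen.append((m.group(1), m.group(2)))

for ts, pod in seen:
    print(ts, pod)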
Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.959734 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tq8p2\" (UniqueName: \"kubernetes.io/projected/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-kube-api-access-tq8p2\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0" Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.959892 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47bjd\" (UniqueName: \"kubernetes.io/projected/10ac1247-3e98-41cd-84f5-52112345b993-kube-api-access-47bjd\") pod \"nova-cell1-novncproxy-0\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.959935 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-config-data\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0" Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.960036 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0" Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.960213 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-logs\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0" Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.960285 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.960475 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.961295 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-logs\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0" Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.969925 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0" Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.969993 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.978208 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-config-data\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0" Jan 04 12:11:30 crc kubenswrapper[5003]: I0104 12:11:30.979275 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-wg2m4"] Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.027651 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tq8p2\" (UniqueName: \"kubernetes.io/projected/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-kube-api-access-tq8p2\") pod \"nova-metadata-0\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " pod="openstack/nova-metadata-0" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.079181 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-sb\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.079317 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv7t9\" (UniqueName: \"kubernetes.io/projected/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-kube-api-access-kv7t9\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.079377 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47bjd\" (UniqueName: \"kubernetes.io/projected/10ac1247-3e98-41cd-84f5-52112345b993-kube-api-access-47bjd\") pod \"nova-cell1-novncproxy-0\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.079439 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-swift-storage-0\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.079482 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-svc\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.079520 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.079586 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-config\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.079616 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-nb\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.079648 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.088463 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.111926 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.135931 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47bjd\" (UniqueName: \"kubernetes.io/projected/10ac1247-3e98-41cd-84f5-52112345b993-kube-api-access-47bjd\") pod \"nova-cell1-novncproxy-0\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.150811 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.182211 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv7t9\" (UniqueName: \"kubernetes.io/projected/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-kube-api-access-kv7t9\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.182348 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-swift-storage-0\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.182400 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-svc\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.182486 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-config\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.182509 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-nb\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.182550 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-sb\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.184077 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-sb\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.184279 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-svc\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.184688 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-config\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.185233 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-swift-storage-0\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.185479 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-nb\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.222004 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv7t9\" (UniqueName: \"kubernetes.io/projected/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-kube-api-access-kv7t9\") pod \"dnsmasq-dns-647df7b8c5-wg2m4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") " pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.247875 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.319753 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.722532 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rgh64" podUID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerName="registry-server" probeResult="failure" output=< Jan 04 12:11:31 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s Jan 04 12:11:31 crc kubenswrapper[5003]: > Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.753954 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-jmh76"] Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.819400 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:31 crc kubenswrapper[5003]: I0104 12:11:31.959958 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.105745 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nb5kc"] Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.107312 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.115655 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.116623 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.117151 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nb5kc"] Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.159052 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.180188 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.260403 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.260526 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-scripts\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.260571 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8sj8\" (UniqueName: \"kubernetes.io/projected/99bfe48b-6290-49a4-b08e-f81e305df2bc-kube-api-access-q8sj8\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " 
pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.260606 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-config-data\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.330106 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"10ac1247-3e98-41cd-84f5-52112345b993","Type":"ContainerStarted","Data":"d46d7a3a379c33c7fe265a611d2f8fb23aa4a8aa22384dbeeb96985f82fea69c"} Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.330933 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8aded6ca-c07f-4915-9953-d774d9a7de8a","Type":"ContainerStarted","Data":"0aaf836633f71bc7a055f5fbd19828d291d9cecb62d0c788af0525b927955626"} Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.332056 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jmh76" event={"ID":"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2","Type":"ContainerStarted","Data":"57cbe7cb23145556ede2cb126df0b6f2b4320f9a6656889cbe5ab3eb6dd0b2cf"} Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.332079 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jmh76" event={"ID":"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2","Type":"ContainerStarted","Data":"75506db27f84a317b5c5cc8525f9656c44c0fbed000399f941156405fdde9a1b"} Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.333955 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deea6419-c73a-47bb-a4da-9e3cbc80fb5a","Type":"ContainerStarted","Data":"16ba24c25a0c73bb1678582f320141a717f8e6661c2d4e946c9068805101831e"} Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.334764 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3e8376ba-e98a-4912-abde-994448dafc7e","Type":"ContainerStarted","Data":"45e170aa2137a457962fbe440870f670272501978687193c1257564b67a188d9"} Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.362149 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.362280 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-scripts\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.362325 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8sj8\" (UniqueName: \"kubernetes.io/projected/99bfe48b-6290-49a4-b08e-f81e305df2bc-kube-api-access-q8sj8\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: 
I0104 12:11:32.362358 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-config-data\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.368136 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-scripts\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.368686 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-config-data\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.370392 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.402886 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8sj8\" (UniqueName: \"kubernetes.io/projected/99bfe48b-6290-49a4-b08e-f81e305df2bc-kube-api-access-q8sj8\") pod \"nova-cell1-conductor-db-sync-nb5kc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.426717 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.467754 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-jmh76" podStartSLOduration=2.467733511 podStartE2EDuration="2.467733511s" podCreationTimestamp="2026-01-04 12:11:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:32.393494561 +0000 UTC m=+1407.866524402" watchObservedRunningTime="2026-01-04 12:11:32.467733511 +0000 UTC m=+1407.940763362" Jan 04 12:11:32 crc kubenswrapper[5003]: I0104 12:11:32.472580 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-wg2m4"] Jan 04 12:11:33 crc kubenswrapper[5003]: I0104 12:11:33.003109 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nb5kc"] Jan 04 12:11:33 crc kubenswrapper[5003]: I0104 12:11:33.358220 5003 generic.go:334] "Generic (PLEG): container finished" podID="5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" containerID="25d3a76041f4786e42243668f2f97d5669cc3b3b9db2ef361de72a3e2ff1b53d" exitCode=0 Jan 04 12:11:33 crc kubenswrapper[5003]: I0104 12:11:33.359101 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" event={"ID":"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4","Type":"ContainerDied","Data":"25d3a76041f4786e42243668f2f97d5669cc3b3b9db2ef361de72a3e2ff1b53d"} Jan 04 12:11:33 crc kubenswrapper[5003]: I0104 12:11:33.359157 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" event={"ID":"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4","Type":"ContainerStarted","Data":"7343a2b31144245e84fd023eb5b79a90bc235cd6e618a177d31ce4099f41f72c"} Jan 04 12:11:33 crc kubenswrapper[5003]: I0104 12:11:33.366116 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nb5kc" event={"ID":"99bfe48b-6290-49a4-b08e-f81e305df2bc","Type":"ContainerStarted","Data":"43bfec4bd608685be4573015f15db1873d268c8b7296b6e047586f2093d24609"} Jan 04 12:11:33 crc kubenswrapper[5003]: I0104 12:11:33.366163 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nb5kc" event={"ID":"99bfe48b-6290-49a4-b08e-f81e305df2bc","Type":"ContainerStarted","Data":"751d6cd56dec714acab5e9c2aa926e32893b8d1f916a0e2419322db1b151ea0e"} Jan 04 12:11:33 crc kubenswrapper[5003]: I0104 12:11:33.416951 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-nb5kc" podStartSLOduration=1.416929479 podStartE2EDuration="1.416929479s" podCreationTimestamp="2026-01-04 12:11:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:33.401809953 +0000 UTC m=+1408.874839794" watchObservedRunningTime="2026-01-04 12:11:33.416929479 +0000 UTC m=+1408.889959320" Jan 04 12:11:34 crc kubenswrapper[5003]: I0104 12:11:34.381703 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" event={"ID":"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4","Type":"ContainerStarted","Data":"b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267"} Jan 04 12:11:34 crc kubenswrapper[5003]: I0104 12:11:34.382096 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:34 crc kubenswrapper[5003]: I0104 12:11:34.406353 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" podStartSLOduration=4.406329687 podStartE2EDuration="4.406329687s" podCreationTimestamp="2026-01-04 12:11:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:34.398732949 +0000 UTC m=+1409.871762810" watchObservedRunningTime="2026-01-04 12:11:34.406329687 +0000 UTC m=+1409.879359528" Jan 04 12:11:34 crc kubenswrapper[5003]: I0104 12:11:34.724844 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:34 crc kubenswrapper[5003]: I0104 12:11:34.743358 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.419298 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deea6419-c73a-47bb-a4da-9e3cbc80fb5a","Type":"ContainerStarted","Data":"676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850"} Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.420525 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deea6419-c73a-47bb-a4da-9e3cbc80fb5a","Type":"ContainerStarted","Data":"5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7"} Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.419524 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="deea6419-c73a-47bb-a4da-9e3cbc80fb5a" containerName="nova-metadata-metadata" containerID="cri-o://676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850" gracePeriod=30 Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.419502 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="deea6419-c73a-47bb-a4da-9e3cbc80fb5a" containerName="nova-metadata-log" containerID="cri-o://5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7" gracePeriod=30 Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.427999 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3e8376ba-e98a-4912-abde-994448dafc7e","Type":"ContainerStarted","Data":"13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942"} Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.455985 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"10ac1247-3e98-41cd-84f5-52112345b993","Type":"ContainerStarted","Data":"4c39555e2f186942918165550947f8c57a1da363df377475bb6d2d0672fd5ebd"} Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.457864 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="10ac1247-3e98-41cd-84f5-52112345b993" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://4c39555e2f186942918165550947f8c57a1da363df377475bb6d2d0672fd5ebd" gracePeriod=30 Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.464972 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8aded6ca-c07f-4915-9953-d774d9a7de8a","Type":"ContainerStarted","Data":"bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5"} Jan 04 12:11:37 crc 
kubenswrapper[5003]: I0104 12:11:37.465058 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8aded6ca-c07f-4915-9953-d774d9a7de8a","Type":"ContainerStarted","Data":"65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596"} Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.473408 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.74030317 podStartE2EDuration="7.473246112s" podCreationTimestamp="2026-01-04 12:11:30 +0000 UTC" firstStartedPulling="2026-01-04 12:11:32.203274679 +0000 UTC m=+1407.676304520" lastFinishedPulling="2026-01-04 12:11:35.936217621 +0000 UTC m=+1411.409247462" observedRunningTime="2026-01-04 12:11:37.456590776 +0000 UTC m=+1412.929620617" watchObservedRunningTime="2026-01-04 12:11:37.473246112 +0000 UTC m=+1412.946275963" Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.515547 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.5531922099999997 podStartE2EDuration="7.515515727s" podCreationTimestamp="2026-01-04 12:11:30 +0000 UTC" firstStartedPulling="2026-01-04 12:11:31.986290258 +0000 UTC m=+1407.459320089" lastFinishedPulling="2026-01-04 12:11:35.948613765 +0000 UTC m=+1411.421643606" observedRunningTime="2026-01-04 12:11:37.4819798 +0000 UTC m=+1412.955009651" watchObservedRunningTime="2026-01-04 12:11:37.515515727 +0000 UTC m=+1412.988545578" Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.558080 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.450418593 podStartE2EDuration="7.558058788s" podCreationTimestamp="2026-01-04 12:11:30 +0000 UTC" firstStartedPulling="2026-01-04 12:11:31.846826983 +0000 UTC m=+1407.319856824" lastFinishedPulling="2026-01-04 12:11:35.954467148 +0000 UTC m=+1411.427497019" observedRunningTime="2026-01-04 12:11:37.521689478 +0000 UTC m=+1412.994719349" watchObservedRunningTime="2026-01-04 12:11:37.558058788 +0000 UTC m=+1413.031088639" Jan 04 12:11:37 crc kubenswrapper[5003]: I0104 12:11:37.585298 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.85282033 podStartE2EDuration="7.585256349s" podCreationTimestamp="2026-01-04 12:11:30 +0000 UTC" firstStartedPulling="2026-01-04 12:11:32.203366301 +0000 UTC m=+1407.676396142" lastFinishedPulling="2026-01-04 12:11:35.93580231 +0000 UTC m=+1411.408832161" observedRunningTime="2026-01-04 12:11:37.536849054 +0000 UTC m=+1413.009878895" watchObservedRunningTime="2026-01-04 12:11:37.585256349 +0000 UTC m=+1413.058286180" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.064827 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.119612 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-combined-ca-bundle\") pod \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.119739 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-logs\") pod \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.119864 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tq8p2\" (UniqueName: \"kubernetes.io/projected/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-kube-api-access-tq8p2\") pod \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.119939 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-config-data\") pod \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\" (UID: \"deea6419-c73a-47bb-a4da-9e3cbc80fb5a\") " Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.120152 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-logs" (OuterVolumeSpecName: "logs") pod "deea6419-c73a-47bb-a4da-9e3cbc80fb5a" (UID: "deea6419-c73a-47bb-a4da-9e3cbc80fb5a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.120541 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.127398 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-kube-api-access-tq8p2" (OuterVolumeSpecName: "kube-api-access-tq8p2") pod "deea6419-c73a-47bb-a4da-9e3cbc80fb5a" (UID: "deea6419-c73a-47bb-a4da-9e3cbc80fb5a"). InnerVolumeSpecName "kube-api-access-tq8p2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.159700 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-config-data" (OuterVolumeSpecName: "config-data") pod "deea6419-c73a-47bb-a4da-9e3cbc80fb5a" (UID: "deea6419-c73a-47bb-a4da-9e3cbc80fb5a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.162139 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "deea6419-c73a-47bb-a4da-9e3cbc80fb5a" (UID: "deea6419-c73a-47bb-a4da-9e3cbc80fb5a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.222180 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tq8p2\" (UniqueName: \"kubernetes.io/projected/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-kube-api-access-tq8p2\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.222580 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.222592 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deea6419-c73a-47bb-a4da-9e3cbc80fb5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.475443 5003 generic.go:334] "Generic (PLEG): container finished" podID="deea6419-c73a-47bb-a4da-9e3cbc80fb5a" containerID="676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850" exitCode=0 Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.475498 5003 generic.go:334] "Generic (PLEG): container finished" podID="deea6419-c73a-47bb-a4da-9e3cbc80fb5a" containerID="5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7" exitCode=143 Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.475533 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.475547 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deea6419-c73a-47bb-a4da-9e3cbc80fb5a","Type":"ContainerDied","Data":"676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850"} Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.475608 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deea6419-c73a-47bb-a4da-9e3cbc80fb5a","Type":"ContainerDied","Data":"5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7"} Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.475623 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deea6419-c73a-47bb-a4da-9e3cbc80fb5a","Type":"ContainerDied","Data":"16ba24c25a0c73bb1678582f320141a717f8e6661c2d4e946c9068805101831e"} Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.475630 5003 scope.go:117] "RemoveContainer" containerID="676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.519261 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.521107 5003 scope.go:117] "RemoveContainer" containerID="5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.533312 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.566833 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:38 crc kubenswrapper[5003]: E0104 12:11:38.567474 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deea6419-c73a-47bb-a4da-9e3cbc80fb5a" containerName="nova-metadata-metadata" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.567500 5003 
state_mem.go:107] "Deleted CPUSet assignment" podUID="deea6419-c73a-47bb-a4da-9e3cbc80fb5a" containerName="nova-metadata-metadata" Jan 04 12:11:38 crc kubenswrapper[5003]: E0104 12:11:38.567541 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deea6419-c73a-47bb-a4da-9e3cbc80fb5a" containerName="nova-metadata-log" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.567549 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="deea6419-c73a-47bb-a4da-9e3cbc80fb5a" containerName="nova-metadata-log" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.567790 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="deea6419-c73a-47bb-a4da-9e3cbc80fb5a" containerName="nova-metadata-log" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.567825 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="deea6419-c73a-47bb-a4da-9e3cbc80fb5a" containerName="nova-metadata-metadata" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.569267 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.578273 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.578550 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.596469 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.604142 5003 scope.go:117] "RemoveContainer" containerID="676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850" Jan 04 12:11:38 crc kubenswrapper[5003]: E0104 12:11:38.607260 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850\": container with ID starting with 676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850 not found: ID does not exist" containerID="676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.607339 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850"} err="failed to get container status \"676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850\": rpc error: code = NotFound desc = could not find container \"676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850\": container with ID starting with 676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850 not found: ID does not exist" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.607376 5003 scope.go:117] "RemoveContainer" containerID="5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7" Jan 04 12:11:38 crc kubenswrapper[5003]: E0104 12:11:38.608479 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7\": container with ID starting with 5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7 not found: ID does not exist" containerID="5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 
12:11:38.608529 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7"} err="failed to get container status \"5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7\": rpc error: code = NotFound desc = could not find container \"5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7\": container with ID starting with 5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7 not found: ID does not exist" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.608572 5003 scope.go:117] "RemoveContainer" containerID="676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.609617 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850"} err="failed to get container status \"676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850\": rpc error: code = NotFound desc = could not find container \"676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850\": container with ID starting with 676a8f2c452b1da7a61c38680f3abc1c005a5b1969b6ce761f246889cd41d850 not found: ID does not exist" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.609674 5003 scope.go:117] "RemoveContainer" containerID="5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.610238 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7"} err="failed to get container status \"5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7\": rpc error: code = NotFound desc = could not find container \"5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7\": container with ID starting with 5c0e554922f948afccf01654900433bcb7efcd48170435468c066697370162b7 not found: ID does not exist" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.634720 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.634773 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.634802 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-config-data\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.635067 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zq5vs\" (UniqueName: \"kubernetes.io/projected/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-kube-api-access-zq5vs\") pod 
\"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.635169 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-logs\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.737004 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-logs\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.737179 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.737207 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.737229 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-config-data\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.737500 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-logs\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.737280 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zq5vs\" (UniqueName: \"kubernetes.io/projected/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-kube-api-access-zq5vs\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.742915 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.743223 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-config-data\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.748077 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.759320 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zq5vs\" (UniqueName: \"kubernetes.io/projected/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-kube-api-access-zq5vs\") pod \"nova-metadata-0\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " pod="openstack/nova-metadata-0" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.819839 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="deea6419-c73a-47bb-a4da-9e3cbc80fb5a" path="/var/lib/kubelet/pods/deea6419-c73a-47bb-a4da-9e3cbc80fb5a/volumes" Jan 04 12:11:38 crc kubenswrapper[5003]: I0104 12:11:38.898507 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:39 crc kubenswrapper[5003]: I0104 12:11:39.388785 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:39 crc kubenswrapper[5003]: I0104 12:11:39.487311 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3f2d25a-1aed-4916-87c9-ebb3053e1e41","Type":"ContainerStarted","Data":"afc5aa08328964620b9d4356dbc41c5c65dd3a569c87b7c1c84b3c2c5897114b"} Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.496043 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3f2d25a-1aed-4916-87c9-ebb3053e1e41","Type":"ContainerStarted","Data":"f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd"} Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.497957 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3f2d25a-1aed-4916-87c9-ebb3053e1e41","Type":"ContainerStarted","Data":"e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da"} Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.498810 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jmh76" event={"ID":"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2","Type":"ContainerDied","Data":"57cbe7cb23145556ede2cb126df0b6f2b4320f9a6656889cbe5ab3eb6dd0b2cf"} Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.498643 5003 generic.go:334] "Generic (PLEG): container finished" podID="3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2" containerID="57cbe7cb23145556ede2cb126df0b6f2b4320f9a6656889cbe5ab3eb6dd0b2cf" exitCode=0 Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.518655 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.518637674 podStartE2EDuration="2.518637674s" podCreationTimestamp="2026-01-04 12:11:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:40.515271716 +0000 UTC m=+1415.988301557" watchObservedRunningTime="2026-01-04 12:11:40.518637674 +0000 UTC m=+1415.991667505" Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.535121 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rgh64" Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.587201 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rgh64" Jan 04 
12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.697133 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.697178 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.706963 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.707356 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.735629 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 04 12:11:40 crc kubenswrapper[5003]: I0104 12:11:40.778768 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rgh64"] Jan 04 12:11:41 crc kubenswrapper[5003]: I0104 12:11:41.248613 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:11:41 crc kubenswrapper[5003]: I0104 12:11:41.323263 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:11:41 crc kubenswrapper[5003]: I0104 12:11:41.391269 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-qzckq"] Jan 04 12:11:41 crc kubenswrapper[5003]: I0104 12:11:41.391543 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq" podUID="0f8f3494-b6ed-4200-9e60-32d895c76f6a" containerName="dnsmasq-dns" containerID="cri-o://de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c" gracePeriod=10 Jan 04 12:11:41 crc kubenswrapper[5003]: I0104 12:11:41.513877 5003 generic.go:334] "Generic (PLEG): container finished" podID="99bfe48b-6290-49a4-b08e-f81e305df2bc" containerID="43bfec4bd608685be4573015f15db1873d268c8b7296b6e047586f2093d24609" exitCode=0 Jan 04 12:11:41 crc kubenswrapper[5003]: I0104 12:11:41.514735 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nb5kc" event={"ID":"99bfe48b-6290-49a4-b08e-f81e305df2bc","Type":"ContainerDied","Data":"43bfec4bd608685be4573015f15db1873d268c8b7296b6e047586f2093d24609"} Jan 04 12:11:41 crc kubenswrapper[5003]: I0104 12:11:41.578975 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 04 12:11:41 crc kubenswrapper[5003]: I0104 12:11:41.780240 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 04 12:11:41 crc kubenswrapper[5003]: I0104 12:11:41.780544 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.053833 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-jmh76" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.064901 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.104594 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvktj\" (UniqueName: \"kubernetes.io/projected/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-kube-api-access-dvktj\") pod \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.104702 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-scripts\") pod \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.104807 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-config-data\") pod \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.104846 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-combined-ca-bundle\") pod \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\" (UID: \"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2\") " Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.114277 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-scripts" (OuterVolumeSpecName: "scripts") pod "3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2" (UID: "3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.125342 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-kube-api-access-dvktj" (OuterVolumeSpecName: "kube-api-access-dvktj") pod "3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2" (UID: "3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2"). InnerVolumeSpecName "kube-api-access-dvktj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.157435 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-config-data" (OuterVolumeSpecName: "config-data") pod "3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2" (UID: "3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.167705 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2" (UID: "3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.206472 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zc5rg\" (UniqueName: \"kubernetes.io/projected/0f8f3494-b6ed-4200-9e60-32d895c76f6a-kube-api-access-zc5rg\") pod \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.206550 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-config\") pod \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.206601 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-nb\") pod \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.206620 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-sb\") pod \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.206649 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-svc\") pod \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.206728 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-swift-storage-0\") pod \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\" (UID: \"0f8f3494-b6ed-4200-9e60-32d895c76f6a\") " Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.207192 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.207207 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.207216 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.207229 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvktj\" (UniqueName: \"kubernetes.io/projected/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2-kube-api-access-dvktj\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.211567 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f8f3494-b6ed-4200-9e60-32d895c76f6a-kube-api-access-zc5rg" (OuterVolumeSpecName: "kube-api-access-zc5rg") pod "0f8f3494-b6ed-4200-9e60-32d895c76f6a" 
(UID: "0f8f3494-b6ed-4200-9e60-32d895c76f6a"). InnerVolumeSpecName "kube-api-access-zc5rg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.257130 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-config" (OuterVolumeSpecName: "config") pod "0f8f3494-b6ed-4200-9e60-32d895c76f6a" (UID: "0f8f3494-b6ed-4200-9e60-32d895c76f6a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.257177 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0f8f3494-b6ed-4200-9e60-32d895c76f6a" (UID: "0f8f3494-b6ed-4200-9e60-32d895c76f6a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.308547 5003 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.308580 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zc5rg\" (UniqueName: \"kubernetes.io/projected/0f8f3494-b6ed-4200-9e60-32d895c76f6a-kube-api-access-zc5rg\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.308589 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.319569 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0f8f3494-b6ed-4200-9e60-32d895c76f6a" (UID: "0f8f3494-b6ed-4200-9e60-32d895c76f6a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.346517 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0f8f3494-b6ed-4200-9e60-32d895c76f6a" (UID: "0f8f3494-b6ed-4200-9e60-32d895c76f6a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.393576 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0f8f3494-b6ed-4200-9e60-32d895c76f6a" (UID: "0f8f3494-b6ed-4200-9e60-32d895c76f6a"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.413338 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.413384 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.413399 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0f8f3494-b6ed-4200-9e60-32d895c76f6a-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.527794 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-jmh76" event={"ID":"3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2","Type":"ContainerDied","Data":"75506db27f84a317b5c5cc8525f9656c44c0fbed000399f941156405fdde9a1b"} Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.527835 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75506db27f84a317b5c5cc8525f9656c44c0fbed000399f941156405fdde9a1b" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.527891 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-jmh76" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.531831 5003 generic.go:334] "Generic (PLEG): container finished" podID="0f8f3494-b6ed-4200-9e60-32d895c76f6a" containerID="de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c" exitCode=0 Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.532116 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rgh64" podUID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerName="registry-server" containerID="cri-o://f192bada85465290612f3b88d09a2afbf10bc142ece6aecc537bae40f6c9c1a3" gracePeriod=2 Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.533711 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.533774 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq" event={"ID":"0f8f3494-b6ed-4200-9e60-32d895c76f6a","Type":"ContainerDied","Data":"de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c"} Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.533810 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-75dbb546bf-qzckq" event={"ID":"0f8f3494-b6ed-4200-9e60-32d895c76f6a","Type":"ContainerDied","Data":"eda91c171cc29023d65503c9cd17383f2854f0efb01135c55d84d1c70f720e43"} Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.533828 5003 scope.go:117] "RemoveContainer" containerID="de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.610577 5003 scope.go:117] "RemoveContainer" containerID="166764e8d75c8ee2f0445664874964a0d41198a944591c07ed1ccd2289360373" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.620804 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-qzckq"] Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.629217 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-75dbb546bf-qzckq"] Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.749537 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.749786 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerName="nova-api-log" containerID="cri-o://65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596" gracePeriod=30 Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.750271 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerName="nova-api-api" containerID="cri-o://bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5" gracePeriod=30 Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.775856 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.827842 5003 scope.go:117] "RemoveContainer" containerID="de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c" Jan 04 12:11:42 crc kubenswrapper[5003]: E0104 12:11:42.829242 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c\": container with ID starting with de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c not found: ID does not exist" containerID="de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.829293 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c"} err="failed to get container status \"de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c\": rpc error: code = NotFound desc = could not find container \"de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c\": container with ID starting with 
de96d3ecee90124a628c00b13197012153a0130bf34e1bbe2b0c0bdab385c56c not found: ID does not exist" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.829321 5003 scope.go:117] "RemoveContainer" containerID="166764e8d75c8ee2f0445664874964a0d41198a944591c07ed1ccd2289360373" Jan 04 12:11:42 crc kubenswrapper[5003]: E0104 12:11:42.829998 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"166764e8d75c8ee2f0445664874964a0d41198a944591c07ed1ccd2289360373\": container with ID starting with 166764e8d75c8ee2f0445664874964a0d41198a944591c07ed1ccd2289360373 not found: ID does not exist" containerID="166764e8d75c8ee2f0445664874964a0d41198a944591c07ed1ccd2289360373" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.830113 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"166764e8d75c8ee2f0445664874964a0d41198a944591c07ed1ccd2289360373"} err="failed to get container status \"166764e8d75c8ee2f0445664874964a0d41198a944591c07ed1ccd2289360373\": rpc error: code = NotFound desc = could not find container \"166764e8d75c8ee2f0445664874964a0d41198a944591c07ed1ccd2289360373\": container with ID starting with 166764e8d75c8ee2f0445664874964a0d41198a944591c07ed1ccd2289360373 not found: ID does not exist" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.834924 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f8f3494-b6ed-4200-9e60-32d895c76f6a" path="/var/lib/kubelet/pods/0f8f3494-b6ed-4200-9e60-32d895c76f6a/volumes" Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.835752 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:42 crc kubenswrapper[5003]: I0104 12:11:42.954053 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.026102 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8sj8\" (UniqueName: \"kubernetes.io/projected/99bfe48b-6290-49a4-b08e-f81e305df2bc-kube-api-access-q8sj8\") pod \"99bfe48b-6290-49a4-b08e-f81e305df2bc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.026276 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-scripts\") pod \"99bfe48b-6290-49a4-b08e-f81e305df2bc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.026331 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-config-data\") pod \"99bfe48b-6290-49a4-b08e-f81e305df2bc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.026360 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-combined-ca-bundle\") pod \"99bfe48b-6290-49a4-b08e-f81e305df2bc\" (UID: \"99bfe48b-6290-49a4-b08e-f81e305df2bc\") " Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.030543 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-scripts" (OuterVolumeSpecName: "scripts") pod "99bfe48b-6290-49a4-b08e-f81e305df2bc" (UID: "99bfe48b-6290-49a4-b08e-f81e305df2bc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.033457 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99bfe48b-6290-49a4-b08e-f81e305df2bc-kube-api-access-q8sj8" (OuterVolumeSpecName: "kube-api-access-q8sj8") pod "99bfe48b-6290-49a4-b08e-f81e305df2bc" (UID: "99bfe48b-6290-49a4-b08e-f81e305df2bc"). InnerVolumeSpecName "kube-api-access-q8sj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.056849 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99bfe48b-6290-49a4-b08e-f81e305df2bc" (UID: "99bfe48b-6290-49a4-b08e-f81e305df2bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.063375 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-config-data" (OuterVolumeSpecName: "config-data") pod "99bfe48b-6290-49a4-b08e-f81e305df2bc" (UID: "99bfe48b-6290-49a4-b08e-f81e305df2bc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.129002 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8sj8\" (UniqueName: \"kubernetes.io/projected/99bfe48b-6290-49a4-b08e-f81e305df2bc-kube-api-access-q8sj8\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.129046 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.129056 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.129068 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99bfe48b-6290-49a4-b08e-f81e305df2bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.554329 5003 generic.go:334] "Generic (PLEG): container finished" podID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerID="f192bada85465290612f3b88d09a2afbf10bc142ece6aecc537bae40f6c9c1a3" exitCode=0 Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.554428 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgh64" event={"ID":"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a","Type":"ContainerDied","Data":"f192bada85465290612f3b88d09a2afbf10bc142ece6aecc537bae40f6c9c1a3"} Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.557770 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-nb5kc" event={"ID":"99bfe48b-6290-49a4-b08e-f81e305df2bc","Type":"ContainerDied","Data":"751d6cd56dec714acab5e9c2aa926e32893b8d1f916a0e2419322db1b151ea0e"} Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.558101 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="751d6cd56dec714acab5e9c2aa926e32893b8d1f916a0e2419322db1b151ea0e" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.558203 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-nb5kc" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.570106 5003 generic.go:334] "Generic (PLEG): container finished" podID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerID="65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596" exitCode=143 Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.570344 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f3f2d25a-1aed-4916-87c9-ebb3053e1e41" containerName="nova-metadata-log" containerID="cri-o://e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da" gracePeriod=30 Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.571632 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8aded6ca-c07f-4915-9953-d774d9a7de8a","Type":"ContainerDied","Data":"65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596"} Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.571997 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="3e8376ba-e98a-4912-abde-994448dafc7e" containerName="nova-scheduler-scheduler" containerID="cri-o://13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942" gracePeriod=30 Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.573163 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f3f2d25a-1aed-4916-87c9-ebb3053e1e41" containerName="nova-metadata-metadata" containerID="cri-o://f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd" gracePeriod=30 Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.654472 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:11:43 crc kubenswrapper[5003]: E0104 12:11:43.655356 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f8f3494-b6ed-4200-9e60-32d895c76f6a" containerName="init" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.655382 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f8f3494-b6ed-4200-9e60-32d895c76f6a" containerName="init" Jan 04 12:11:43 crc kubenswrapper[5003]: E0104 12:11:43.655402 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2" containerName="nova-manage" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.655409 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2" containerName="nova-manage" Jan 04 12:11:43 crc kubenswrapper[5003]: E0104 12:11:43.655420 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99bfe48b-6290-49a4-b08e-f81e305df2bc" containerName="nova-cell1-conductor-db-sync" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.655426 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="99bfe48b-6290-49a4-b08e-f81e305df2bc" containerName="nova-cell1-conductor-db-sync" Jan 04 12:11:43 crc kubenswrapper[5003]: E0104 12:11:43.655437 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f8f3494-b6ed-4200-9e60-32d895c76f6a" containerName="dnsmasq-dns" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.655498 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f8f3494-b6ed-4200-9e60-32d895c76f6a" containerName="dnsmasq-dns" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.655733 5003 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2" containerName="nova-manage" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.655756 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="99bfe48b-6290-49a4-b08e-f81e305df2bc" containerName="nova-cell1-conductor-db-sync" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.655764 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f8f3494-b6ed-4200-9e60-32d895c76f6a" containerName="dnsmasq-dns" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.658813 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.661310 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.705263 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.740635 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5c8h\" (UniqueName: \"kubernetes.io/projected/ff44c8db-792b-491a-879a-7e1ae7717a0f-kube-api-access-g5c8h\") pod \"nova-cell1-conductor-0\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.740763 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.740821 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.842255 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5c8h\" (UniqueName: \"kubernetes.io/projected/ff44c8db-792b-491a-879a-7e1ae7717a0f-kube-api-access-g5c8h\") pod \"nova-cell1-conductor-0\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.842394 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.842444 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.850667 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.850674 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.865290 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5c8h\" (UniqueName: \"kubernetes.io/projected/ff44c8db-792b-491a-879a-7e1ae7717a0f-kube-api-access-g5c8h\") pod \"nova-cell1-conductor-0\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.903316 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.903383 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.935986 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:43 crc kubenswrapper[5003]: I0104 12:11:43.940685 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rgh64" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.053544 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-utilities\") pod \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.053783 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9nj9q\" (UniqueName: \"kubernetes.io/projected/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-kube-api-access-9nj9q\") pod \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.054608 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-utilities" (OuterVolumeSpecName: "utilities") pod "4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" (UID: "4d0624ef-34a3-4ea3-b7ac-1c8415a3080a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.054677 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-catalog-content\") pod \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\" (UID: \"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a\") " Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.056053 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.059624 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-kube-api-access-9nj9q" (OuterVolumeSpecName: "kube-api-access-9nj9q") pod "4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" (UID: "4d0624ef-34a3-4ea3-b7ac-1c8415a3080a"). InnerVolumeSpecName "kube-api-access-9nj9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.107644 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.157709 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-config-data\") pod \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.157827 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zq5vs\" (UniqueName: \"kubernetes.io/projected/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-kube-api-access-zq5vs\") pod \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.157892 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-logs\") pod \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.157997 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-nova-metadata-tls-certs\") pod \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.158032 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-combined-ca-bundle\") pod \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\" (UID: \"f3f2d25a-1aed-4916-87c9-ebb3053e1e41\") " Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.158544 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9nj9q\" (UniqueName: \"kubernetes.io/projected/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-kube-api-access-9nj9q\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.160844 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-logs" (OuterVolumeSpecName: "logs") pod "f3f2d25a-1aed-4916-87c9-ebb3053e1e41" (UID: "f3f2d25a-1aed-4916-87c9-ebb3053e1e41"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.163825 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-kube-api-access-zq5vs" (OuterVolumeSpecName: "kube-api-access-zq5vs") pod "f3f2d25a-1aed-4916-87c9-ebb3053e1e41" (UID: "f3f2d25a-1aed-4916-87c9-ebb3053e1e41"). InnerVolumeSpecName "kube-api-access-zq5vs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.181806 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" (UID: "4d0624ef-34a3-4ea3-b7ac-1c8415a3080a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.193166 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3f2d25a-1aed-4916-87c9-ebb3053e1e41" (UID: "f3f2d25a-1aed-4916-87c9-ebb3053e1e41"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.195540 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-config-data" (OuterVolumeSpecName: "config-data") pod "f3f2d25a-1aed-4916-87c9-ebb3053e1e41" (UID: "f3f2d25a-1aed-4916-87c9-ebb3053e1e41"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.223793 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "f3f2d25a-1aed-4916-87c9-ebb3053e1e41" (UID: "f3f2d25a-1aed-4916-87c9-ebb3053e1e41"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.260835 5003 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.260875 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.260888 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.260904 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.260919 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zq5vs\" (UniqueName: \"kubernetes.io/projected/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-kube-api-access-zq5vs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.260931 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3f2d25a-1aed-4916-87c9-ebb3053e1e41-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.451604 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:11:44 crc kubenswrapper[5003]: W0104 12:11:44.463552 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff44c8db_792b_491a_879a_7e1ae7717a0f.slice/crio-ca814a1af9bf85d5efcdb5e3da6ad02d89800fe0f39da8423f9602e50147b621 WatchSource:0}: Error finding container ca814a1af9bf85d5efcdb5e3da6ad02d89800fe0f39da8423f9602e50147b621: Status 404 returned error can't find the container with id ca814a1af9bf85d5efcdb5e3da6ad02d89800fe0f39da8423f9602e50147b621 Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.586657 5003 generic.go:334] "Generic (PLEG): container finished" podID="f3f2d25a-1aed-4916-87c9-ebb3053e1e41" containerID="f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd" exitCode=0 Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.587191 5003 generic.go:334] "Generic (PLEG): container finished" podID="f3f2d25a-1aed-4916-87c9-ebb3053e1e41" containerID="e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da" exitCode=143 Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.586720 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3f2d25a-1aed-4916-87c9-ebb3053e1e41","Type":"ContainerDied","Data":"f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd"} Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.586786 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.587304 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3f2d25a-1aed-4916-87c9-ebb3053e1e41","Type":"ContainerDied","Data":"e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da"} Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.587353 5003 scope.go:117] "RemoveContainer" containerID="f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.587354 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f3f2d25a-1aed-4916-87c9-ebb3053e1e41","Type":"ContainerDied","Data":"afc5aa08328964620b9d4356dbc41c5c65dd3a569c87b7c1c84b3c2c5897114b"} Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.589853 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ff44c8db-792b-491a-879a-7e1ae7717a0f","Type":"ContainerStarted","Data":"ca814a1af9bf85d5efcdb5e3da6ad02d89800fe0f39da8423f9602e50147b621"} Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.594216 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rgh64" event={"ID":"4d0624ef-34a3-4ea3-b7ac-1c8415a3080a","Type":"ContainerDied","Data":"25867ccd846c6ed87392656ab51468805d768e9de309609dce08c61364d2d834"} Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.594320 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rgh64" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.633006 5003 scope.go:117] "RemoveContainer" containerID="e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.682158 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rgh64"] Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.693912 5003 scope.go:117] "RemoveContainer" containerID="f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd" Jan 04 12:11:44 crc kubenswrapper[5003]: E0104 12:11:44.694565 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd\": container with ID starting with f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd not found: ID does not exist" containerID="f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.694620 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd"} err="failed to get container status \"f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd\": rpc error: code = NotFound desc = could not find container \"f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd\": container with ID starting with f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd not found: ID does not exist" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.694662 5003 scope.go:117] "RemoveContainer" containerID="e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da" Jan 04 12:11:44 crc kubenswrapper[5003]: E0104 12:11:44.695283 5003 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da\": container with ID starting with e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da not found: ID does not exist" containerID="e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.695333 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da"} err="failed to get container status \"e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da\": rpc error: code = NotFound desc = could not find container \"e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da\": container with ID starting with e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da not found: ID does not exist" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.695379 5003 scope.go:117] "RemoveContainer" containerID="f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.695823 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd"} err="failed to get container status \"f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd\": rpc error: code = NotFound desc = could not find container \"f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd\": container with ID starting with f9c197888c5bb675f8e538991456f03562d2621e09759296179fab3c06d9affd not found: ID does not exist" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.695853 5003 scope.go:117] "RemoveContainer" containerID="e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.696059 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rgh64"] Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.696497 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da"} err="failed to get container status \"e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da\": rpc error: code = NotFound desc = could not find container \"e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da\": container with ID starting with e5694da42f4c9adcb53350bce0e5b206dba01a5f6e5bcc3bf765b828ebca82da not found: ID does not exist" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.696533 5003 scope.go:117] "RemoveContainer" containerID="f192bada85465290612f3b88d09a2afbf10bc142ece6aecc537bae40f6c9c1a3" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.722082 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.781120 5003 scope.go:117] "RemoveContainer" containerID="97470e8d3ac5ad6ac580afac5a5c13df77ee58798c55318d8c55408595c0ff4b" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.787570 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.794872 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:44 crc kubenswrapper[5003]: E0104 12:11:44.816352 5003 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerName="registry-server" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.816574 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerName="registry-server" Jan 04 12:11:44 crc kubenswrapper[5003]: E0104 12:11:44.816710 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerName="extract-content" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.816727 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerName="extract-content" Jan 04 12:11:44 crc kubenswrapper[5003]: E0104 12:11:44.816774 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerName="extract-utilities" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.816786 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerName="extract-utilities" Jan 04 12:11:44 crc kubenswrapper[5003]: E0104 12:11:44.816828 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3f2d25a-1aed-4916-87c9-ebb3053e1e41" containerName="nova-metadata-log" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.816838 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3f2d25a-1aed-4916-87c9-ebb3053e1e41" containerName="nova-metadata-log" Jan 04 12:11:44 crc kubenswrapper[5003]: E0104 12:11:44.816859 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3f2d25a-1aed-4916-87c9-ebb3053e1e41" containerName="nova-metadata-metadata" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.816869 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3f2d25a-1aed-4916-87c9-ebb3053e1e41" containerName="nova-metadata-metadata" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.818530 5003 scope.go:117] "RemoveContainer" containerID="8948d796b917674a577983e425464ae342b79f7ab015a92cb9b0d8279ae86853" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.819411 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3f2d25a-1aed-4916-87c9-ebb3053e1e41" containerName="nova-metadata-log" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.819468 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" containerName="registry-server" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.819501 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3f2d25a-1aed-4916-87c9-ebb3053e1e41" containerName="nova-metadata-metadata" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.831332 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.836571 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.839647 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.866848 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d0624ef-34a3-4ea3-b7ac-1c8415a3080a" path="/var/lib/kubelet/pods/4d0624ef-34a3-4ea3-b7ac-1c8415a3080a/volumes" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.868094 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3f2d25a-1aed-4916-87c9-ebb3053e1e41" path="/var/lib/kubelet/pods/f3f2d25a-1aed-4916-87c9-ebb3053e1e41/volumes" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.868887 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.890727 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-config-data\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.890813 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c9hq\" (UniqueName: \"kubernetes.io/projected/318a10f9-0b3b-46c4-a3d7-978d795b67e4-kube-api-access-5c9hq\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.890867 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/318a10f9-0b3b-46c4-a3d7-978d795b67e4-logs\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.890888 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.890987 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.994201 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/318a10f9-0b3b-46c4-a3d7-978d795b67e4-logs\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.994280 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.994458 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.994507 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-config-data\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.994551 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c9hq\" (UniqueName: \"kubernetes.io/projected/318a10f9-0b3b-46c4-a3d7-978d795b67e4-kube-api-access-5c9hq\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:44 crc kubenswrapper[5003]: I0104 12:11:44.995064 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/318a10f9-0b3b-46c4-a3d7-978d795b67e4-logs\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:45 crc kubenswrapper[5003]: I0104 12:11:45.002786 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:45 crc kubenswrapper[5003]: I0104 12:11:45.002799 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:45 crc kubenswrapper[5003]: I0104 12:11:45.006238 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-config-data\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:45 crc kubenswrapper[5003]: I0104 12:11:45.014564 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c9hq\" (UniqueName: \"kubernetes.io/projected/318a10f9-0b3b-46c4-a3d7-978d795b67e4-kube-api-access-5c9hq\") pod \"nova-metadata-0\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " pod="openstack/nova-metadata-0" Jan 04 12:11:45 crc kubenswrapper[5003]: I0104 12:11:45.193177 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:11:45 crc kubenswrapper[5003]: I0104 12:11:45.614593 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ff44c8db-792b-491a-879a-7e1ae7717a0f","Type":"ContainerStarted","Data":"c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a"} Jan 04 12:11:45 crc kubenswrapper[5003]: I0104 12:11:45.616561 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Jan 04 12:11:45 crc kubenswrapper[5003]: I0104 12:11:45.652028 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.651992706 podStartE2EDuration="2.651992706s" podCreationTimestamp="2026-01-04 12:11:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:45.645940988 +0000 UTC m=+1421.118970859" watchObservedRunningTime="2026-01-04 12:11:45.651992706 +0000 UTC m=+1421.125022557" Jan 04 12:11:45 crc kubenswrapper[5003]: I0104 12:11:45.705110 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:11:45 crc kubenswrapper[5003]: W0104 12:11:45.705213 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod318a10f9_0b3b_46c4_a3d7_978d795b67e4.slice/crio-4f0487bf6ef5efb956b64eb21c34478bfcbe3fa6710ad2dbe057261dfe986565 WatchSource:0}: Error finding container 4f0487bf6ef5efb956b64eb21c34478bfcbe3fa6710ad2dbe057261dfe986565: Status 404 returned error can't find the container with id 4f0487bf6ef5efb956b64eb21c34478bfcbe3fa6710ad2dbe057261dfe986565 Jan 04 12:11:45 crc kubenswrapper[5003]: E0104 12:11:45.708862 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:11:45 crc kubenswrapper[5003]: E0104 12:11:45.710724 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:11:45 crc kubenswrapper[5003]: E0104 12:11:45.714212 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:11:45 crc kubenswrapper[5003]: E0104 12:11:45.714259 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="3e8376ba-e98a-4912-abde-994448dafc7e" containerName="nova-scheduler-scheduler" Jan 04 12:11:46 crc kubenswrapper[5003]: I0104 12:11:46.632844 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"318a10f9-0b3b-46c4-a3d7-978d795b67e4","Type":"ContainerStarted","Data":"5f0ff55a5228bd2c7cce6f8b494982689ecaf0845f8cc4b99c6855bbd52077ae"} Jan 04 12:11:46 crc kubenswrapper[5003]: I0104 12:11:46.633547 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"318a10f9-0b3b-46c4-a3d7-978d795b67e4","Type":"ContainerStarted","Data":"6591fb08d275b9ad5bb9d8eaf8fa974fea5226307b28d5b2749f852c425ee738"} Jan 04 12:11:46 crc kubenswrapper[5003]: I0104 12:11:46.633559 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"318a10f9-0b3b-46c4-a3d7-978d795b67e4","Type":"ContainerStarted","Data":"4f0487bf6ef5efb956b64eb21c34478bfcbe3fa6710ad2dbe057261dfe986565"} Jan 04 12:11:46 crc kubenswrapper[5003]: I0104 12:11:46.668384 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.668349858 podStartE2EDuration="2.668349858s" podCreationTimestamp="2026-01-04 12:11:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:46.661328525 +0000 UTC m=+1422.134358396" watchObservedRunningTime="2026-01-04 12:11:46.668349858 +0000 UTC m=+1422.141379759" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.625755 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.661580 5003 generic.go:334] "Generic (PLEG): container finished" podID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerID="bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5" exitCode=0 Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.662743 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.663314 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8aded6ca-c07f-4915-9953-d774d9a7de8a","Type":"ContainerDied","Data":"bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5"} Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.663349 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8aded6ca-c07f-4915-9953-d774d9a7de8a","Type":"ContainerDied","Data":"0aaf836633f71bc7a055f5fbd19828d291d9cecb62d0c788af0525b927955626"} Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.663370 5003 scope.go:117] "RemoveContainer" containerID="bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.694283 5003 scope.go:117] "RemoveContainer" containerID="65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.716261 5003 scope.go:117] "RemoveContainer" containerID="bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5" Jan 04 12:11:47 crc kubenswrapper[5003]: E0104 12:11:47.716857 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5\": container with ID starting with bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5 not found: ID does not exist" containerID="bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.716916 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5"} err="failed to get container status \"bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5\": rpc error: code = NotFound desc = could not find container \"bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5\": container with ID starting with bfd56bd97860c6de31d1424c736fd93990f98a8f0d0239d0ebabbe7f8f754bc5 not found: ID does not exist" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.716952 5003 scope.go:117] "RemoveContainer" containerID="65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596" Jan 04 12:11:47 crc kubenswrapper[5003]: E0104 12:11:47.717551 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596\": container with ID starting with 65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596 not found: ID does not exist" containerID="65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.717592 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596"} err="failed to get container status \"65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596\": rpc error: code = NotFound desc = could not find container \"65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596\": container with ID starting with 65b2b0fcfd292709c290c173cd35333a5957aa08976e557cd1eef0862f7d7596 not found: ID does not exist" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.777784 5003 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v85s7\" (UniqueName: \"kubernetes.io/projected/8aded6ca-c07f-4915-9953-d774d9a7de8a-kube-api-access-v85s7\") pod \"8aded6ca-c07f-4915-9953-d774d9a7de8a\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.778041 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8aded6ca-c07f-4915-9953-d774d9a7de8a-logs\") pod \"8aded6ca-c07f-4915-9953-d774d9a7de8a\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.778091 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-config-data\") pod \"8aded6ca-c07f-4915-9953-d774d9a7de8a\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.778197 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-combined-ca-bundle\") pod \"8aded6ca-c07f-4915-9953-d774d9a7de8a\" (UID: \"8aded6ca-c07f-4915-9953-d774d9a7de8a\") " Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.778814 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8aded6ca-c07f-4915-9953-d774d9a7de8a-logs" (OuterVolumeSpecName: "logs") pod "8aded6ca-c07f-4915-9953-d774d9a7de8a" (UID: "8aded6ca-c07f-4915-9953-d774d9a7de8a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.790375 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8aded6ca-c07f-4915-9953-d774d9a7de8a-kube-api-access-v85s7" (OuterVolumeSpecName: "kube-api-access-v85s7") pod "8aded6ca-c07f-4915-9953-d774d9a7de8a" (UID: "8aded6ca-c07f-4915-9953-d774d9a7de8a"). InnerVolumeSpecName "kube-api-access-v85s7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.813338 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8aded6ca-c07f-4915-9953-d774d9a7de8a" (UID: "8aded6ca-c07f-4915-9953-d774d9a7de8a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.834204 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-config-data" (OuterVolumeSpecName: "config-data") pod "8aded6ca-c07f-4915-9953-d774d9a7de8a" (UID: "8aded6ca-c07f-4915-9953-d774d9a7de8a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.881142 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.881181 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v85s7\" (UniqueName: \"kubernetes.io/projected/8aded6ca-c07f-4915-9953-d774d9a7de8a-kube-api-access-v85s7\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.881210 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8aded6ca-c07f-4915-9953-d774d9a7de8a-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:47 crc kubenswrapper[5003]: I0104 12:11:47.881219 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8aded6ca-c07f-4915-9953-d774d9a7de8a-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.010881 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.058200 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.092872 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:48 crc kubenswrapper[5003]: E0104 12:11:48.093339 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerName="nova-api-api" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.093366 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerName="nova-api-api" Jan 04 12:11:48 crc kubenswrapper[5003]: E0104 12:11:48.093406 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerName="nova-api-log" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.093415 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerName="nova-api-log" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.093604 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerName="nova-api-api" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.093630 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aded6ca-c07f-4915-9953-d774d9a7de8a" containerName="nova-api-log" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.095850 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.098272 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.114802 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.188595 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-config-data\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.188682 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hb27\" (UniqueName: \"kubernetes.io/projected/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-kube-api-access-5hb27\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.188707 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-logs\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.189283 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.292104 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-config-data\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.292931 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hb27\" (UniqueName: \"kubernetes.io/projected/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-kube-api-access-5hb27\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.292967 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-logs\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.293830 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-logs\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.296371 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " 
pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.300620 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-config-data\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.300855 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.315957 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hb27\" (UniqueName: \"kubernetes.io/projected/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-kube-api-access-5hb27\") pod \"nova-api-0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") " pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.446682 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.447137 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.603966 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-config-data\") pod \"3e8376ba-e98a-4912-abde-994448dafc7e\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.604712 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-combined-ca-bundle\") pod \"3e8376ba-e98a-4912-abde-994448dafc7e\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.604786 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vg5gg\" (UniqueName: \"kubernetes.io/projected/3e8376ba-e98a-4912-abde-994448dafc7e-kube-api-access-vg5gg\") pod \"3e8376ba-e98a-4912-abde-994448dafc7e\" (UID: \"3e8376ba-e98a-4912-abde-994448dafc7e\") " Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.611347 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e8376ba-e98a-4912-abde-994448dafc7e-kube-api-access-vg5gg" (OuterVolumeSpecName: "kube-api-access-vg5gg") pod "3e8376ba-e98a-4912-abde-994448dafc7e" (UID: "3e8376ba-e98a-4912-abde-994448dafc7e"). InnerVolumeSpecName "kube-api-access-vg5gg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.643279 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-config-data" (OuterVolumeSpecName: "config-data") pod "3e8376ba-e98a-4912-abde-994448dafc7e" (UID: "3e8376ba-e98a-4912-abde-994448dafc7e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.684268 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e8376ba-e98a-4912-abde-994448dafc7e" (UID: "3e8376ba-e98a-4912-abde-994448dafc7e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.690456 5003 generic.go:334] "Generic (PLEG): container finished" podID="3e8376ba-e98a-4912-abde-994448dafc7e" containerID="13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942" exitCode=0 Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.690570 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3e8376ba-e98a-4912-abde-994448dafc7e","Type":"ContainerDied","Data":"13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942"} Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.690658 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3e8376ba-e98a-4912-abde-994448dafc7e","Type":"ContainerDied","Data":"45e170aa2137a457962fbe440870f670272501978687193c1257564b67a188d9"} Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.690679 5003 scope.go:117] "RemoveContainer" containerID="13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.690885 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.707322 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.707357 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e8376ba-e98a-4912-abde-994448dafc7e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.707374 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vg5gg\" (UniqueName: \"kubernetes.io/projected/3e8376ba-e98a-4912-abde-994448dafc7e-kube-api-access-vg5gg\") on node \"crc\" DevicePath \"\"" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.773258 5003 scope.go:117] "RemoveContainer" containerID="13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942" Jan 04 12:11:48 crc kubenswrapper[5003]: E0104 12:11:48.776351 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942\": container with ID starting with 13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942 not found: ID does not exist" containerID="13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.776405 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942"} err="failed to get container status \"13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942\": rpc error: code = NotFound desc = could not 
find container \"13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942\": container with ID starting with 13c4fd594d53e19837682f482fbb54240d69f43b501ac7e99f8f2bb724133942 not found: ID does not exist" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.787604 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.802263 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.819904 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e8376ba-e98a-4912-abde-994448dafc7e" path="/var/lib/kubelet/pods/3e8376ba-e98a-4912-abde-994448dafc7e/volumes" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.820528 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8aded6ca-c07f-4915-9953-d774d9a7de8a" path="/var/lib/kubelet/pods/8aded6ca-c07f-4915-9953-d774d9a7de8a/volumes" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.837227 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:48 crc kubenswrapper[5003]: E0104 12:11:48.837837 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e8376ba-e98a-4912-abde-994448dafc7e" containerName="nova-scheduler-scheduler" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.837860 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e8376ba-e98a-4912-abde-994448dafc7e" containerName="nova-scheduler-scheduler" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.838245 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e8376ba-e98a-4912-abde-994448dafc7e" containerName="nova-scheduler-scheduler" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.839167 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.841492 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.848538 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.903245 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.913477 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t48kr\" (UniqueName: \"kubernetes.io/projected/37d57f7a-9e52-4f46-978b-20ba81ad8b22-kube-api-access-t48kr\") pod \"nova-scheduler-0\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.913706 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:48 crc kubenswrapper[5003]: I0104 12:11:48.913791 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-config-data\") pod \"nova-scheduler-0\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 12:11:49.016211 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 12:11:49.016290 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-config-data\") pod \"nova-scheduler-0\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 12:11:49.016453 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t48kr\" (UniqueName: \"kubernetes.io/projected/37d57f7a-9e52-4f46-978b-20ba81ad8b22-kube-api-access-t48kr\") pod \"nova-scheduler-0\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 12:11:49.023217 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 12:11:49.023408 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-config-data\") pod \"nova-scheduler-0\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " pod="openstack/nova-scheduler-0" Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 
12:11:49.032086 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t48kr\" (UniqueName: \"kubernetes.io/projected/37d57f7a-9e52-4f46-978b-20ba81ad8b22-kube-api-access-t48kr\") pod \"nova-scheduler-0\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " pod="openstack/nova-scheduler-0"
Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 12:11:49.162829 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 12:11:49.697033 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Jan 04 12:11:49 crc kubenswrapper[5003]: W0104 12:11:49.701176 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37d57f7a_9e52_4f46_978b_20ba81ad8b22.slice/crio-35d7f6320bfed1bb90d211e94f67782b848a2ecedc694cc010a9f2fb974e6279 WatchSource:0}: Error finding container 35d7f6320bfed1bb90d211e94f67782b848a2ecedc694cc010a9f2fb974e6279: Status 404 returned error can't find the container with id 35d7f6320bfed1bb90d211e94f67782b848a2ecedc694cc010a9f2fb974e6279
Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 12:11:49.709120 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2f6f0f96-f5e0-4941-ae0e-61300c0709f0","Type":"ContainerStarted","Data":"201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8"}
Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 12:11:49.709186 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2f6f0f96-f5e0-4941-ae0e-61300c0709f0","Type":"ContainerStarted","Data":"d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e"}
Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 12:11:49.709214 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2f6f0f96-f5e0-4941-ae0e-61300c0709f0","Type":"ContainerStarted","Data":"f538beb5fcfaf33ae4d77f5cb23667cb250ae4ce951346ede7f6e60e7bb249ff"}
Jan 04 12:11:49 crc kubenswrapper[5003]: I0104 12:11:49.734103 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.734085392 podStartE2EDuration="1.734085392s" podCreationTimestamp="2026-01-04 12:11:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:49.72672324 +0000 UTC m=+1425.199753081" watchObservedRunningTime="2026-01-04 12:11:49.734085392 +0000 UTC m=+1425.207115243"
Jan 04 12:11:50 crc kubenswrapper[5003]: I0104 12:11:50.194135 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 04 12:11:50 crc kubenswrapper[5003]: I0104 12:11:50.194677 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Jan 04 12:11:50 crc kubenswrapper[5003]: I0104 12:11:50.725185 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"37d57f7a-9e52-4f46-978b-20ba81ad8b22","Type":"ContainerStarted","Data":"8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b"}
Jan 04 12:11:50 crc kubenswrapper[5003]: I0104 12:11:50.725269 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"37d57f7a-9e52-4f46-978b-20ba81ad8b22","Type":"ContainerStarted","Data":"35d7f6320bfed1bb90d211e94f67782b848a2ecedc694cc010a9f2fb974e6279"}
Jan 04 12:11:50 crc kubenswrapper[5003]: I0104 12:11:50.743735 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.74371586 podStartE2EDuration="2.74371586s" podCreationTimestamp="2026-01-04 12:11:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:11:50.741965994 +0000 UTC m=+1426.214995855" watchObservedRunningTime="2026-01-04 12:11:50.74371586 +0000 UTC m=+1426.216745701"
Jan 04 12:11:51 crc kubenswrapper[5003]: I0104 12:11:51.534940 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Jan 04 12:11:53 crc kubenswrapper[5003]: I0104 12:11:53.972763 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Jan 04 12:11:54 crc kubenswrapper[5003]: I0104 12:11:54.163563 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.152540 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.152835 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="df21c365-a2e2-4a85-8de8-f132fd605981" containerName="kube-state-metrics" containerID="cri-o://984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf" gracePeriod=30
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.193508 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.193561 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.655890 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.771318 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4kj4\" (UniqueName: \"kubernetes.io/projected/df21c365-a2e2-4a85-8de8-f132fd605981-kube-api-access-j4kj4\") pod \"df21c365-a2e2-4a85-8de8-f132fd605981\" (UID: \"df21c365-a2e2-4a85-8de8-f132fd605981\") "
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.782527 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df21c365-a2e2-4a85-8de8-f132fd605981-kube-api-access-j4kj4" (OuterVolumeSpecName: "kube-api-access-j4kj4") pod "df21c365-a2e2-4a85-8de8-f132fd605981" (UID: "df21c365-a2e2-4a85-8de8-f132fd605981"). InnerVolumeSpecName "kube-api-access-j4kj4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.783231 5003 generic.go:334] "Generic (PLEG): container finished" podID="df21c365-a2e2-4a85-8de8-f132fd605981" containerID="984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf" exitCode=2
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.783281 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"df21c365-a2e2-4a85-8de8-f132fd605981","Type":"ContainerDied","Data":"984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf"}
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.783316 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"df21c365-a2e2-4a85-8de8-f132fd605981","Type":"ContainerDied","Data":"e8c4df52246c759f8a562a0e4cbeb7f7b5c2b3b130f46fe0791f7ef9e62ac095"}
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.783334 5003 scope.go:117] "RemoveContainer" containerID="984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.783467 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.868710 5003 scope.go:117] "RemoveContainer" containerID="984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf"
Jan 04 12:11:55 crc kubenswrapper[5003]: E0104 12:11:55.871665 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf\": container with ID starting with 984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf not found: ID does not exist" containerID="984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.871837 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf"} err="failed to get container status \"984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf\": rpc error: code = NotFound desc = could not find container \"984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf\": container with ID starting with 984e6528ce905587f41a617865e1de59b2395782ff928f4d6b455026fd4b7eaf not found: ID does not exist"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.874124 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4kj4\" (UniqueName: \"kubernetes.io/projected/df21c365-a2e2-4a85-8de8-f132fd605981-kube-api-access-j4kj4\") on node \"crc\" DevicePath \"\""
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.875597 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.883985 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.962334 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 04 12:11:55 crc kubenswrapper[5003]: E0104 12:11:55.984966 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df21c365-a2e2-4a85-8de8-f132fd605981" containerName="kube-state-metrics"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.985032 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="df21c365-a2e2-4a85-8de8-f132fd605981" containerName="kube-state-metrics"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.985748 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="df21c365-a2e2-4a85-8de8-f132fd605981" containerName="kube-state-metrics"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.986977 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Jan 04 12:11:55 crc kubenswrapper[5003]: I0104 12:11:55.989083 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.013483 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.013820 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.081423 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zmjl\" (UniqueName: \"kubernetes.io/projected/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-api-access-5zmjl\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.081513 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.081559 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.081671 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.183736 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zmjl\" (UniqueName: \"kubernetes.io/projected/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-api-access-5zmjl\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.183805 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.183837 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.183920 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.187922 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.188488 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.194035 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.204803 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zmjl\" (UniqueName: \"kubernetes.io/projected/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-api-access-5zmjl\") pod \"kube-state-metrics-0\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " pod="openstack/kube-state-metrics-0"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.209219 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.209228 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.371412 5003 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.817450 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df21c365-a2e2-4a85-8de8-f132fd605981" path="/var/lib/kubelet/pods/df21c365-a2e2-4a85-8de8-f132fd605981/volumes" Jan 04 12:11:56 crc kubenswrapper[5003]: I0104 12:11:56.855194 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:11:56 crc kubenswrapper[5003]: W0104 12:11:56.861513 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90ff3d24_4688_4c94_b5d6_c408e74c28ae.slice/crio-df735e508babebf9dfbafd295f4dda02b9dba20e9b3495a9eb9324e89907ed40 WatchSource:0}: Error finding container df735e508babebf9dfbafd295f4dda02b9dba20e9b3495a9eb9324e89907ed40: Status 404 returned error can't find the container with id df735e508babebf9dfbafd295f4dda02b9dba20e9b3495a9eb9324e89907ed40 Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.400306 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.401000 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="ceilometer-central-agent" containerID="cri-o://19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035" gracePeriod=30 Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.401091 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="proxy-httpd" containerID="cri-o://5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9" gracePeriod=30 Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.401167 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="sg-core" containerID="cri-o://b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5" gracePeriod=30 Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.401139 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="ceilometer-notification-agent" containerID="cri-o://5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a" gracePeriod=30 Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.803708 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"90ff3d24-4688-4c94-b5d6-c408e74c28ae","Type":"ContainerStarted","Data":"db90af2bbb86e3f491229ad487c735393946380d939a9f6fd713ad64285edc75"} Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.803766 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"90ff3d24-4688-4c94-b5d6-c408e74c28ae","Type":"ContainerStarted","Data":"df735e508babebf9dfbafd295f4dda02b9dba20e9b3495a9eb9324e89907ed40"} Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.804835 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.808553 5003 generic.go:334] "Generic (PLEG): container finished" podID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" 
containerID="5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9" exitCode=0 Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.808593 5003 generic.go:334] "Generic (PLEG): container finished" podID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerID="b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5" exitCode=2 Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.808621 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c05451f7-8c2b-4728-8e6d-d9956df16ebd","Type":"ContainerDied","Data":"5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9"} Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.808651 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c05451f7-8c2b-4728-8e6d-d9956df16ebd","Type":"ContainerDied","Data":"b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5"} Jan 04 12:11:57 crc kubenswrapper[5003]: I0104 12:11:57.833931 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.498472147 podStartE2EDuration="2.833901864s" podCreationTimestamp="2026-01-04 12:11:55 +0000 UTC" firstStartedPulling="2026-01-04 12:11:56.865880334 +0000 UTC m=+1432.338910165" lastFinishedPulling="2026-01-04 12:11:57.201310051 +0000 UTC m=+1432.674339882" observedRunningTime="2026-01-04 12:11:57.821354256 +0000 UTC m=+1433.294384117" watchObservedRunningTime="2026-01-04 12:11:57.833901864 +0000 UTC m=+1433.306931705" Jan 04 12:11:58 crc kubenswrapper[5003]: I0104 12:11:58.446902 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:11:58 crc kubenswrapper[5003]: I0104 12:11:58.446964 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:11:58 crc kubenswrapper[5003]: I0104 12:11:58.832639 5003 generic.go:334] "Generic (PLEG): container finished" podID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerID="19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035" exitCode=0 Jan 04 12:11:58 crc kubenswrapper[5003]: I0104 12:11:58.832783 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c05451f7-8c2b-4728-8e6d-d9956df16ebd","Type":"ContainerDied","Data":"19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035"} Jan 04 12:11:59 crc kubenswrapper[5003]: I0104 12:11:59.163987 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 04 12:11:59 crc kubenswrapper[5003]: I0104 12:11:59.197159 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 04 12:11:59 crc kubenswrapper[5003]: I0104 12:11:59.530234 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 04 12:11:59 crc kubenswrapper[5003]: I0104 12:11:59.530253 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 04 12:11:59 crc kubenswrapper[5003]: I0104 
12:11:59.873196 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.362987 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.488963 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-scripts\") pod \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.489103 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-run-httpd\") pod \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.489131 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-combined-ca-bundle\") pod \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.489169 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-sg-core-conf-yaml\") pod \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.489249 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-log-httpd\") pod \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.489398 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfn4j\" (UniqueName: \"kubernetes.io/projected/c05451f7-8c2b-4728-8e6d-d9956df16ebd-kube-api-access-zfn4j\") pod \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.489465 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-config-data\") pod \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\" (UID: \"c05451f7-8c2b-4728-8e6d-d9956df16ebd\") " Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.489919 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c05451f7-8c2b-4728-8e6d-d9956df16ebd" (UID: "c05451f7-8c2b-4728-8e6d-d9956df16ebd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.489952 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c05451f7-8c2b-4728-8e6d-d9956df16ebd" (UID: "c05451f7-8c2b-4728-8e6d-d9956df16ebd"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.512249 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c05451f7-8c2b-4728-8e6d-d9956df16ebd-kube-api-access-zfn4j" (OuterVolumeSpecName: "kube-api-access-zfn4j") pod "c05451f7-8c2b-4728-8e6d-d9956df16ebd" (UID: "c05451f7-8c2b-4728-8e6d-d9956df16ebd"). InnerVolumeSpecName "kube-api-access-zfn4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.515212 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-scripts" (OuterVolumeSpecName: "scripts") pod "c05451f7-8c2b-4728-8e6d-d9956df16ebd" (UID: "c05451f7-8c2b-4728-8e6d-d9956df16ebd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.568378 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c05451f7-8c2b-4728-8e6d-d9956df16ebd" (UID: "c05451f7-8c2b-4728-8e6d-d9956df16ebd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.591984 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfn4j\" (UniqueName: \"kubernetes.io/projected/c05451f7-8c2b-4728-8e6d-d9956df16ebd-kube-api-access-zfn4j\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.592041 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.592055 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.592067 5003 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.592078 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c05451f7-8c2b-4728-8e6d-d9956df16ebd-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.592556 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c05451f7-8c2b-4728-8e6d-d9956df16ebd" (UID: "c05451f7-8c2b-4728-8e6d-d9956df16ebd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.633811 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-config-data" (OuterVolumeSpecName: "config-data") pod "c05451f7-8c2b-4728-8e6d-d9956df16ebd" (UID: "c05451f7-8c2b-4728-8e6d-d9956df16ebd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.693694 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.693721 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c05451f7-8c2b-4728-8e6d-d9956df16ebd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.857793 5003 generic.go:334] "Generic (PLEG): container finished" podID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerID="5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a" exitCode=0 Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.857844 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c05451f7-8c2b-4728-8e6d-d9956df16ebd","Type":"ContainerDied","Data":"5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a"} Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.857875 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.857931 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c05451f7-8c2b-4728-8e6d-d9956df16ebd","Type":"ContainerDied","Data":"796de35bb84521a8afb378e3c32ef03571092717776b626d734f48b77949cba2"} Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.857951 5003 scope.go:117] "RemoveContainer" containerID="5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.886359 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.889661 5003 scope.go:117] "RemoveContainer" containerID="b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.895409 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.912970 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:12:00 crc kubenswrapper[5003]: E0104 12:12:00.913601 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="ceilometer-notification-agent" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.913665 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="ceilometer-notification-agent" Jan 04 12:12:00 crc kubenswrapper[5003]: E0104 12:12:00.913763 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="ceilometer-central-agent" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.913817 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="ceilometer-central-agent" Jan 04 12:12:00 crc kubenswrapper[5003]: E0104 12:12:00.913915 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="sg-core" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.913968 5003 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="sg-core" Jan 04 12:12:00 crc kubenswrapper[5003]: E0104 12:12:00.914042 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="proxy-httpd" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.914106 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="proxy-httpd" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.914358 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="ceilometer-central-agent" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.914426 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="ceilometer-notification-agent" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.914488 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="proxy-httpd" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.914545 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" containerName="sg-core" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.917027 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.919697 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.919978 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.920203 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.935665 5003 scope.go:117] "RemoveContainer" containerID="5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a" Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.944263 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:12:00 crc kubenswrapper[5003]: I0104 12:12:00.975084 5003 scope.go:117] "RemoveContainer" containerID="19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.000313 5003 scope.go:117] "RemoveContainer" containerID="5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9" Jan 04 12:12:01 crc kubenswrapper[5003]: E0104 12:12:01.000963 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9\": container with ID starting with 5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9 not found: ID does not exist" containerID="5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001058 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9"} err="failed to get container status \"5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9\": rpc error: code = NotFound desc = could not find container 
\"5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9\": container with ID starting with 5337a9074cbf90bdff1ef89a2e21dcda111864feaf03c542a0f159064bffe1c9 not found: ID does not exist" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001093 5003 scope.go:117] "RemoveContainer" containerID="b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5" Jan 04 12:12:01 crc kubenswrapper[5003]: E0104 12:12:01.001357 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5\": container with ID starting with b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5 not found: ID does not exist" containerID="b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001380 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5"} err="failed to get container status \"b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5\": rpc error: code = NotFound desc = could not find container \"b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5\": container with ID starting with b3b378c788720fc3878b3fcf89010faf136469ee2a059141b5912b5a975fb5d5 not found: ID does not exist" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001399 5003 scope.go:117] "RemoveContainer" containerID="5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a" Jan 04 12:12:01 crc kubenswrapper[5003]: E0104 12:12:01.001616 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a\": container with ID starting with 5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a not found: ID does not exist" containerID="5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001635 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a"} err="failed to get container status \"5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a\": rpc error: code = NotFound desc = could not find container \"5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a\": container with ID starting with 5de1ebf0b9b9ce6b86770e193625bfa19dfa1e0a25a64985812f99aa5f3cee1a not found: ID does not exist" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001647 5003 scope.go:117] "RemoveContainer" containerID="19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001688 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001762 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-log-httpd\") pod \"ceilometer-0\" (UID: 
\"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001789 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-config-data\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001879 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frvkm\" (UniqueName: \"kubernetes.io/projected/d4c92d74-120a-4b34-9792-3508cfcd588b-kube-api-access-frvkm\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001898 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-scripts\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001926 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001947 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-run-httpd\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.001969 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: E0104 12:12:01.002127 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035\": container with ID starting with 19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035 not found: ID does not exist" containerID="19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.002145 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035"} err="failed to get container status \"19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035\": rpc error: code = NotFound desc = could not find container \"19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035\": container with ID starting with 19831bc82aedf4a5a68057a966e101ef67c5e5bdc7fef9013287957680888035 not found: ID does not exist" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.103508 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-scripts\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.103564 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.103586 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-run-httpd\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.103609 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.103696 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.103730 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-log-httpd\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.103747 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-config-data\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.103797 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frvkm\" (UniqueName: \"kubernetes.io/projected/d4c92d74-120a-4b34-9792-3508cfcd588b-kube-api-access-frvkm\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.105317 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-log-httpd\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.105685 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-run-httpd\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.108622 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.108671 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-scripts\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.109483 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.111957 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-config-data\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.117441 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.120800 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frvkm\" (UniqueName: \"kubernetes.io/projected/d4c92d74-120a-4b34-9792-3508cfcd588b-kube-api-access-frvkm\") pod \"ceilometer-0\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") " pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.237647 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.719085 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:12:01 crc kubenswrapper[5003]: W0104 12:12:01.723173 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4c92d74_120a_4b34_9792_3508cfcd588b.slice/crio-21c03817ae1b63d5adbcebd0eefbb347e11ee8ea52cbe921e591aad47e09722c WatchSource:0}: Error finding container 21c03817ae1b63d5adbcebd0eefbb347e11ee8ea52cbe921e591aad47e09722c: Status 404 returned error can't find the container with id 21c03817ae1b63d5adbcebd0eefbb347e11ee8ea52cbe921e591aad47e09722c Jan 04 12:12:01 crc kubenswrapper[5003]: I0104 12:12:01.876073 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4c92d74-120a-4b34-9792-3508cfcd588b","Type":"ContainerStarted","Data":"21c03817ae1b63d5adbcebd0eefbb347e11ee8ea52cbe921e591aad47e09722c"} Jan 04 12:12:02 crc kubenswrapper[5003]: I0104 12:12:02.818070 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c05451f7-8c2b-4728-8e6d-d9956df16ebd" path="/var/lib/kubelet/pods/c05451f7-8c2b-4728-8e6d-d9956df16ebd/volumes" Jan 04 12:12:02 crc kubenswrapper[5003]: I0104 12:12:02.885631 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4c92d74-120a-4b34-9792-3508cfcd588b","Type":"ContainerStarted","Data":"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"} Jan 04 12:12:03 crc kubenswrapper[5003]: I0104 12:12:03.896810 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4c92d74-120a-4b34-9792-3508cfcd588b","Type":"ContainerStarted","Data":"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"} Jan 04 12:12:05 crc kubenswrapper[5003]: I0104 12:12:05.210100 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 04 12:12:05 crc kubenswrapper[5003]: I0104 12:12:05.212063 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 04 12:12:05 crc kubenswrapper[5003]: I0104 12:12:05.219715 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 04 12:12:05 crc kubenswrapper[5003]: I0104 12:12:05.918047 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4c92d74-120a-4b34-9792-3508cfcd588b","Type":"ContainerStarted","Data":"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"} Jan 04 12:12:05 crc kubenswrapper[5003]: I0104 12:12:05.923832 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 04 12:12:06 crc kubenswrapper[5003]: I0104 12:12:06.396637 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.455192 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.455628 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.456415 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 04 12:12:08 crc 
kubenswrapper[5003]: I0104 12:12:08.456443 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.461084 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.487103 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.749848 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-7kqd7"] Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.765349 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.776105 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-7kqd7"] Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.886404 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-config\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.886539 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-nb\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.886593 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-swift-storage-0\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.886634 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6mxl\" (UniqueName: \"kubernetes.io/projected/67adf34a-962a-435b-8e35-ae1387c097b6-kube-api-access-x6mxl\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.886648 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-sb\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.886683 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-svc\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.950617 5003 generic.go:334] "Generic (PLEG): container finished" 
podID="10ac1247-3e98-41cd-84f5-52112345b993" containerID="4c39555e2f186942918165550947f8c57a1da363df377475bb6d2d0672fd5ebd" exitCode=137 Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.951965 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"10ac1247-3e98-41cd-84f5-52112345b993","Type":"ContainerDied","Data":"4c39555e2f186942918165550947f8c57a1da363df377475bb6d2d0672fd5ebd"} Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.988462 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-config\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.988591 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-nb\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.988662 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-swift-storage-0\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.988729 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-sb\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.988755 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6mxl\" (UniqueName: \"kubernetes.io/projected/67adf34a-962a-435b-8e35-ae1387c097b6-kube-api-access-x6mxl\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.988807 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-svc\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.989464 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-config\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.990166 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-svc\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.990565 
5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-swift-storage-0\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.990788 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-sb\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:08 crc kubenswrapper[5003]: I0104 12:12:08.990973 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-nb\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.037376 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6mxl\" (UniqueName: \"kubernetes.io/projected/67adf34a-962a-435b-8e35-ae1387c097b6-kube-api-access-x6mxl\") pod \"dnsmasq-dns-fcd6f8f8f-7kqd7\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.156511 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.623201 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.703048 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-config-data\") pod \"10ac1247-3e98-41cd-84f5-52112345b993\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.703476 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47bjd\" (UniqueName: \"kubernetes.io/projected/10ac1247-3e98-41cd-84f5-52112345b993-kube-api-access-47bjd\") pod \"10ac1247-3e98-41cd-84f5-52112345b993\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.703577 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-combined-ca-bundle\") pod \"10ac1247-3e98-41cd-84f5-52112345b993\" (UID: \"10ac1247-3e98-41cd-84f5-52112345b993\") " Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.710229 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10ac1247-3e98-41cd-84f5-52112345b993-kube-api-access-47bjd" (OuterVolumeSpecName: "kube-api-access-47bjd") pod "10ac1247-3e98-41cd-84f5-52112345b993" (UID: "10ac1247-3e98-41cd-84f5-52112345b993"). InnerVolumeSpecName "kube-api-access-47bjd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.750484 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10ac1247-3e98-41cd-84f5-52112345b993" (UID: "10ac1247-3e98-41cd-84f5-52112345b993"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.756105 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-config-data" (OuterVolumeSpecName: "config-data") pod "10ac1247-3e98-41cd-84f5-52112345b993" (UID: "10ac1247-3e98-41cd-84f5-52112345b993"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.806763 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-7kqd7"] Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.807434 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.807476 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47bjd\" (UniqueName: \"kubernetes.io/projected/10ac1247-3e98-41cd-84f5-52112345b993-kube-api-access-47bjd\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.807492 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10ac1247-3e98-41cd-84f5-52112345b993-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.969968 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4c92d74-120a-4b34-9792-3508cfcd588b","Type":"ContainerStarted","Data":"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"} Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.972396 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.979359 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"10ac1247-3e98-41cd-84f5-52112345b993","Type":"ContainerDied","Data":"d46d7a3a379c33c7fe265a611d2f8fb23aa4a8aa22384dbeeb96985f82fea69c"} Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.979435 5003 scope.go:117] "RemoveContainer" containerID="4c39555e2f186942918165550947f8c57a1da363df377475bb6d2d0672fd5ebd" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.979616 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:09 crc kubenswrapper[5003]: I0104 12:12:09.989340 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" event={"ID":"67adf34a-962a-435b-8e35-ae1387c097b6","Type":"ContainerStarted","Data":"2136a2ccc126a4468037da941d378d56e8f116bc364f4b4fb53b5f12f50d9d9d"} Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.028339 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.425775524 podStartE2EDuration="10.028306749s" podCreationTimestamp="2026-01-04 12:12:00 +0000 UTC" firstStartedPulling="2026-01-04 12:12:01.725527323 +0000 UTC m=+1437.198557164" lastFinishedPulling="2026-01-04 12:12:09.328058548 +0000 UTC m=+1444.801088389" observedRunningTime="2026-01-04 12:12:09.994149786 +0000 UTC m=+1445.467179637" watchObservedRunningTime="2026-01-04 12:12:10.028306749 +0000 UTC m=+1445.501336590" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.056121 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.068097 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.077730 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:12:10 crc kubenswrapper[5003]: E0104 12:12:10.078503 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10ac1247-3e98-41cd-84f5-52112345b993" containerName="nova-cell1-novncproxy-novncproxy" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.078534 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="10ac1247-3e98-41cd-84f5-52112345b993" containerName="nova-cell1-novncproxy-novncproxy" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.078816 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="10ac1247-3e98-41cd-84f5-52112345b993" containerName="nova-cell1-novncproxy-novncproxy" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.079816 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.090129 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.090372 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.090373 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.099244 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.221983 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.222069 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpdr6\" (UniqueName: \"kubernetes.io/projected/890e99fd-959d-4946-9716-acfe78278964-kube-api-access-dpdr6\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.222104 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.222333 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.222536 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.324417 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.324495 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpdr6\" (UniqueName: \"kubernetes.io/projected/890e99fd-959d-4946-9716-acfe78278964-kube-api-access-dpdr6\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 
04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.324535 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.324652 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.324707 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.330748 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.331204 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.333055 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.340268 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.344116 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpdr6\" (UniqueName: \"kubernetes.io/projected/890e99fd-959d-4946-9716-acfe78278964-kube-api-access-dpdr6\") pod \"nova-cell1-novncproxy-0\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.477914 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:10 crc kubenswrapper[5003]: I0104 12:12:10.826624 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10ac1247-3e98-41cd-84f5-52112345b993" path="/var/lib/kubelet/pods/10ac1247-3e98-41cd-84f5-52112345b993/volumes" Jan 04 12:12:11 crc kubenswrapper[5003]: I0104 12:12:11.007408 5003 generic.go:334] "Generic (PLEG): container finished" podID="67adf34a-962a-435b-8e35-ae1387c097b6" containerID="bf568fc5453059ee85c44af4c2ca1bb686f34300564315d4ba9cc8f2758524b2" exitCode=0 Jan 04 12:12:11 crc kubenswrapper[5003]: I0104 12:12:11.007698 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" event={"ID":"67adf34a-962a-435b-8e35-ae1387c097b6","Type":"ContainerDied","Data":"bf568fc5453059ee85c44af4c2ca1bb686f34300564315d4ba9cc8f2758524b2"} Jan 04 12:12:11 crc kubenswrapper[5003]: I0104 12:12:11.082658 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:12:11 crc kubenswrapper[5003]: I0104 12:12:11.208711 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:12:11 crc kubenswrapper[5003]: W0104 12:12:11.210349 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod890e99fd_959d_4946_9716_acfe78278964.slice/crio-b5ed1b408c59b4ef15d5def5aa2df873918de3c2767b9c6391eb16944810d1fc WatchSource:0}: Error finding container b5ed1b408c59b4ef15d5def5aa2df873918de3c2767b9c6391eb16944810d1fc: Status 404 returned error can't find the container with id b5ed1b408c59b4ef15d5def5aa2df873918de3c2767b9c6391eb16944810d1fc Jan 04 12:12:11 crc kubenswrapper[5003]: I0104 12:12:11.918775 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:12:11 crc kubenswrapper[5003]: I0104 12:12:11.919147 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerName="nova-api-log" containerID="cri-o://d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e" gracePeriod=30 Jan 04 12:12:11 crc kubenswrapper[5003]: I0104 12:12:11.919543 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerName="nova-api-api" containerID="cri-o://201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8" gracePeriod=30 Jan 04 12:12:12 crc kubenswrapper[5003]: I0104 12:12:12.036344 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"890e99fd-959d-4946-9716-acfe78278964","Type":"ContainerStarted","Data":"5c16e890258cb037b15eed8bde5425015cdc0c2f4dd920723e36c9b49ced1ff0"} Jan 04 12:12:12 crc kubenswrapper[5003]: I0104 12:12:12.036943 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"890e99fd-959d-4946-9716-acfe78278964","Type":"ContainerStarted","Data":"b5ed1b408c59b4ef15d5def5aa2df873918de3c2767b9c6391eb16944810d1fc"} Jan 04 12:12:12 crc kubenswrapper[5003]: I0104 12:12:12.047824 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" event={"ID":"67adf34a-962a-435b-8e35-ae1387c097b6","Type":"ContainerStarted","Data":"ce4043834be33197cdad3e565b1de177593fb0b0b9db95e5d58f8971339c2f91"} Jan 04 12:12:12 crc kubenswrapper[5003]: I0104 
12:12:12.048150 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7"
Jan 04 12:12:12 crc kubenswrapper[5003]: I0104 12:12:12.063280 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.063252812 podStartE2EDuration="2.063252812s" podCreationTimestamp="2026-01-04 12:12:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:12:12.061375803 +0000 UTC m=+1447.534405654" watchObservedRunningTime="2026-01-04 12:12:12.063252812 +0000 UTC m=+1447.536282653"
Jan 04 12:12:12 crc kubenswrapper[5003]: I0104 12:12:12.107214 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" podStartSLOduration=4.107179 podStartE2EDuration="4.107179s" podCreationTimestamp="2026-01-04 12:12:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:12:12.099214582 +0000 UTC m=+1447.572244433" watchObservedRunningTime="2026-01-04 12:12:12.107179 +0000 UTC m=+1447.580208841"
Jan 04 12:12:13 crc kubenswrapper[5003]: I0104 12:12:13.060736 5003 generic.go:334] "Generic (PLEG): container finished" podID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerID="d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e" exitCode=143
Jan 04 12:12:13 crc kubenswrapper[5003]: I0104 12:12:13.060797 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2f6f0f96-f5e0-4941-ae0e-61300c0709f0","Type":"ContainerDied","Data":"d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e"}
Jan 04 12:12:13 crc kubenswrapper[5003]: I0104 12:12:13.061417 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="ceilometer-central-agent" containerID="cri-o://604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83" gracePeriod=30
Jan 04 12:12:13 crc kubenswrapper[5003]: I0104 12:12:13.061937 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="proxy-httpd" containerID="cri-o://9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783" gracePeriod=30
Jan 04 12:12:13 crc kubenswrapper[5003]: I0104 12:12:13.061970 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="ceilometer-notification-agent" containerID="cri-o://1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d" gracePeriod=30
Jan 04 12:12:13 crc kubenswrapper[5003]: I0104 12:12:13.062093 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="sg-core" containerID="cri-o://b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e" gracePeriod=30
Jan 04 12:12:13 crc kubenswrapper[5003]: I0104 12:12:13.960511 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.072085 5003 generic.go:334] "Generic (PLEG): container finished" podID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerID="9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783" exitCode=0
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.072122 5003 generic.go:334] "Generic (PLEG): container finished" podID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerID="b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e" exitCode=2
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.072132 5003 generic.go:334] "Generic (PLEG): container finished" podID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerID="1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d" exitCode=0
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.072141 5003 generic.go:334] "Generic (PLEG): container finished" podID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerID="604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83" exitCode=0
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.073158 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.073273 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4c92d74-120a-4b34-9792-3508cfcd588b","Type":"ContainerDied","Data":"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"}
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.073332 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4c92d74-120a-4b34-9792-3508cfcd588b","Type":"ContainerDied","Data":"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"}
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.073344 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4c92d74-120a-4b34-9792-3508cfcd588b","Type":"ContainerDied","Data":"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"}
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.073353 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4c92d74-120a-4b34-9792-3508cfcd588b","Type":"ContainerDied","Data":"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"}
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.073363 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d4c92d74-120a-4b34-9792-3508cfcd588b","Type":"ContainerDied","Data":"21c03817ae1b63d5adbcebd0eefbb347e11ee8ea52cbe921e591aad47e09722c"}
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.073379 5003 scope.go:117] "RemoveContainer" containerID="9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.095732 5003 scope.go:117] "RemoveContainer" containerID="b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.119533 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-ceilometer-tls-certs\") pod \"d4c92d74-120a-4b34-9792-3508cfcd588b\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") "
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.119717 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-combined-ca-bundle\") pod \"d4c92d74-120a-4b34-9792-3508cfcd588b\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") "
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.120165 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-config-data\") pod \"d4c92d74-120a-4b34-9792-3508cfcd588b\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") "
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.120307 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-run-httpd\") pod \"d4c92d74-120a-4b34-9792-3508cfcd588b\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") "
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.120388 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-sg-core-conf-yaml\") pod \"d4c92d74-120a-4b34-9792-3508cfcd588b\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") "
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.120517 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-log-httpd\") pod \"d4c92d74-120a-4b34-9792-3508cfcd588b\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") "
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.120570 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-scripts\") pod \"d4c92d74-120a-4b34-9792-3508cfcd588b\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") "
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.120645 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frvkm\" (UniqueName: \"kubernetes.io/projected/d4c92d74-120a-4b34-9792-3508cfcd588b-kube-api-access-frvkm\") pod \"d4c92d74-120a-4b34-9792-3508cfcd588b\" (UID: \"d4c92d74-120a-4b34-9792-3508cfcd588b\") "
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.120855 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d4c92d74-120a-4b34-9792-3508cfcd588b" (UID: "d4c92d74-120a-4b34-9792-3508cfcd588b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.121334 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d4c92d74-120a-4b34-9792-3508cfcd588b" (UID: "d4c92d74-120a-4b34-9792-3508cfcd588b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.121848 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-run-httpd\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.121866 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4c92d74-120a-4b34-9792-3508cfcd588b-log-httpd\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.122246 5003 scope.go:117] "RemoveContainer" containerID="1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.126497 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4c92d74-120a-4b34-9792-3508cfcd588b-kube-api-access-frvkm" (OuterVolumeSpecName: "kube-api-access-frvkm") pod "d4c92d74-120a-4b34-9792-3508cfcd588b" (UID: "d4c92d74-120a-4b34-9792-3508cfcd588b"). InnerVolumeSpecName "kube-api-access-frvkm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.128110 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-scripts" (OuterVolumeSpecName: "scripts") pod "d4c92d74-120a-4b34-9792-3508cfcd588b" (UID: "d4c92d74-120a-4b34-9792-3508cfcd588b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.154261 5003 scope.go:117] "RemoveContainer" containerID="604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.159881 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d4c92d74-120a-4b34-9792-3508cfcd588b" (UID: "d4c92d74-120a-4b34-9792-3508cfcd588b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.181719 5003 scope.go:117] "RemoveContainer" containerID="9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"
Jan 04 12:12:14 crc kubenswrapper[5003]: E0104 12:12:14.183562 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783\": container with ID starting with 9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783 not found: ID does not exist" containerID="9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.183616 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"} err="failed to get container status \"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783\": rpc error: code = NotFound desc = could not find container \"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783\": container with ID starting with 9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783 not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.183650 5003 scope.go:117] "RemoveContainer" containerID="b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"
Jan 04 12:12:14 crc kubenswrapper[5003]: E0104 12:12:14.185228 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e\": container with ID starting with b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e not found: ID does not exist" containerID="b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.185255 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"} err="failed to get container status \"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e\": rpc error: code = NotFound desc = could not find container \"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e\": container with ID starting with b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.185270 5003 scope.go:117] "RemoveContainer" containerID="1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"
Jan 04 12:12:14 crc kubenswrapper[5003]: E0104 12:12:14.186420 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d\": container with ID starting with 1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d not found: ID does not exist" containerID="1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.186452 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"} err="failed to get container status \"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d\": rpc error: code = NotFound desc = could not find container \"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d\": container with ID starting with 1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.186471 5003 scope.go:117] "RemoveContainer" containerID="604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"
Jan 04 12:12:14 crc kubenswrapper[5003]: E0104 12:12:14.186838 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83\": container with ID starting with 604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83 not found: ID does not exist" containerID="604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.186902 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"} err="failed to get container status \"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83\": rpc error: code = NotFound desc = could not find container \"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83\": container with ID starting with 604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83 not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.186920 5003 scope.go:117] "RemoveContainer" containerID="9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.187171 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"} err="failed to get container status \"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783\": rpc error: code = NotFound desc = could not find container \"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783\": container with ID starting with 9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783 not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.187195 5003 scope.go:117] "RemoveContainer" containerID="b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.187391 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"} err="failed to get container status \"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e\": rpc error: code = NotFound desc = could not find container \"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e\": container with ID starting with b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.187418 5003 scope.go:117] "RemoveContainer" containerID="1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.187681 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"} err="failed to get container status \"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d\": rpc error: code = NotFound desc = could not find container \"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d\": container with ID starting with 1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.187698 5003 scope.go:117] "RemoveContainer" containerID="604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.188027 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"} err="failed to get container status \"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83\": rpc error: code = NotFound desc = could not find container \"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83\": container with ID starting with 604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83 not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.188094 5003 scope.go:117] "RemoveContainer" containerID="9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.188418 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"} err="failed to get container status \"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783\": rpc error: code = NotFound desc = could not find container \"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783\": container with ID starting with 9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783 not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.188461 5003 scope.go:117] "RemoveContainer" containerID="b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.188934 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"} err="failed to get container status \"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e\": rpc error: code = NotFound desc = could not find container \"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e\": container with ID starting with b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.188962 5003 scope.go:117] "RemoveContainer" containerID="1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.189835 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"} err="failed to get container status \"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d\": rpc error: code = NotFound desc = could not find container \"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d\": container with ID starting with 1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.189863 5003 scope.go:117] "RemoveContainer" containerID="604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.190207 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"} err="failed to get container status \"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83\": rpc error: code = NotFound desc = could not find container \"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83\": container with ID starting with 604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83 not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.190232 5003 scope.go:117] "RemoveContainer" containerID="9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.190551 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783"} err="failed to get container status \"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783\": rpc error: code = NotFound desc = could not find container \"9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783\": container with ID starting with 9e48cd49daa2dc3d1dcc758d5b96d45f2b60846e815fe5b02b8b7243bb96f783 not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.190574 5003 scope.go:117] "RemoveContainer" containerID="b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.191296 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e"} err="failed to get container status \"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e\": rpc error: code = NotFound desc = could not find container \"b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e\": container with ID starting with b4e752e48482fdcecc8354cd6dd09f747779acbec0452a9675cffe27bb13838e not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.191318 5003 scope.go:117] "RemoveContainer" containerID="1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.191626 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d"} err="failed to get container status \"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d\": rpc error: code = NotFound desc = could not find container \"1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d\": container with ID starting with 1a3bb4cdbc1251124253a6625a3d33227d76f13d264530dd23fa9b13f8e3f17d not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.191655 5003 scope.go:117] "RemoveContainer" containerID="604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.192033 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83"} err="failed to get container status \"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83\": rpc error: code = NotFound desc = could not find container \"604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83\": container with ID starting with 604b321a0e329f5747aee34bdf500c968c0f6c5d55a69074e18de67d06a4ad83 not found: ID does not exist"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.200582 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "d4c92d74-120a-4b34-9792-3508cfcd588b" (UID: "d4c92d74-120a-4b34-9792-3508cfcd588b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.226259 5003 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.226617 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.226727 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frvkm\" (UniqueName: \"kubernetes.io/projected/d4c92d74-120a-4b34-9792-3508cfcd588b-kube-api-access-frvkm\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.226823 5003 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.243826 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-config-data" (OuterVolumeSpecName: "config-data") pod "d4c92d74-120a-4b34-9792-3508cfcd588b" (UID: "d4c92d74-120a-4b34-9792-3508cfcd588b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.245597 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4c92d74-120a-4b34-9792-3508cfcd588b" (UID: "d4c92d74-120a-4b34-9792-3508cfcd588b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.329222 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.329575 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4c92d74-120a-4b34-9792-3508cfcd588b-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.456313 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.470114 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.479298 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:12:14 crc kubenswrapper[5003]: E0104 12:12:14.479816 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="ceilometer-notification-agent"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.479844 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="ceilometer-notification-agent"
Jan 04 12:12:14 crc kubenswrapper[5003]: E0104 12:12:14.479877 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="sg-core"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.479885 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="sg-core"
Jan 04 12:12:14 crc kubenswrapper[5003]: E0104 12:12:14.479904 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="ceilometer-central-agent"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.479910 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="ceilometer-central-agent"
Jan 04 12:12:14 crc kubenswrapper[5003]: E0104 12:12:14.479925 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="proxy-httpd"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.479931 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="proxy-httpd"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.480198 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="proxy-httpd"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.480217 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="sg-core"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.480237 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="ceilometer-notification-agent"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.480251 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" containerName="ceilometer-central-agent"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.482215 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.485076 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.485130 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.485591 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.486730 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.636285 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-config-data\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.636545 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-scripts\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.636686 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-run-httpd\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.636768 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.636797 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.636835 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.636899 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2tnh\" (UniqueName: \"kubernetes.io/projected/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-kube-api-access-n2tnh\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.636944 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-log-httpd\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.739368 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-config-data\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.739877 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-scripts\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.739921 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-run-httpd\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.739971 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.739989 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.740042 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.740076 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2tnh\" (UniqueName: \"kubernetes.io/projected/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-kube-api-access-n2tnh\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.740123 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-log-httpd\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.740617 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-log-httpd\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.740924 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-run-httpd\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.749992 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-config-data\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.750207 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.754768 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.756314 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.758913 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-scripts\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.763961 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2tnh\" (UniqueName: \"kubernetes.io/projected/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-kube-api-access-n2tnh\") pod \"ceilometer-0\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " pod="openstack/ceilometer-0"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.817891 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4c92d74-120a-4b34-9792-3508cfcd588b" path="/var/lib/kubelet/pods/d4c92d74-120a-4b34-9792-3508cfcd588b/volumes"
Jan 04 12:12:14 crc kubenswrapper[5003]: I0104 12:12:14.845619 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.347792 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.478587 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.764850 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.870097 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-combined-ca-bundle\") pod \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") "
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.870262 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-config-data\") pod \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") "
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.870367 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hb27\" (UniqueName: \"kubernetes.io/projected/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-kube-api-access-5hb27\") pod \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") "
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.870473 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-logs\") pod \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\" (UID: \"2f6f0f96-f5e0-4941-ae0e-61300c0709f0\") "
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.871182 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-logs" (OuterVolumeSpecName: "logs") pod "2f6f0f96-f5e0-4941-ae0e-61300c0709f0" (UID: "2f6f0f96-f5e0-4941-ae0e-61300c0709f0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.875959 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-kube-api-access-5hb27" (OuterVolumeSpecName: "kube-api-access-5hb27") pod "2f6f0f96-f5e0-4941-ae0e-61300c0709f0" (UID: "2f6f0f96-f5e0-4941-ae0e-61300c0709f0"). InnerVolumeSpecName "kube-api-access-5hb27". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.901244 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-config-data" (OuterVolumeSpecName: "config-data") pod "2f6f0f96-f5e0-4941-ae0e-61300c0709f0" (UID: "2f6f0f96-f5e0-4941-ae0e-61300c0709f0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.931465 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2f6f0f96-f5e0-4941-ae0e-61300c0709f0" (UID: "2f6f0f96-f5e0-4941-ae0e-61300c0709f0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.973884 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.973923 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.973939 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hb27\" (UniqueName: \"kubernetes.io/projected/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-kube-api-access-5hb27\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:15 crc kubenswrapper[5003]: I0104 12:12:15.973950 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f6f0f96-f5e0-4941-ae0e-61300c0709f0-logs\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.104938 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42","Type":"ContainerStarted","Data":"2d1f1d056b61194569686724f2003dba8a8e5a0d07b9d3c97b835d48ef2f3c01"}
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.104986 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42","Type":"ContainerStarted","Data":"70aae434263e1dd5f3014d291c91e75d5bad2e7f752df83cda006c169d11dc8e"}
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.110357 5003 generic.go:334] "Generic (PLEG): container finished" podID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerID="201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8" exitCode=0
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.110415 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2f6f0f96-f5e0-4941-ae0e-61300c0709f0","Type":"ContainerDied","Data":"201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8"}
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.110450 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.110472 5003 scope.go:117] "RemoveContainer" containerID="201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.110457 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2f6f0f96-f5e0-4941-ae0e-61300c0709f0","Type":"ContainerDied","Data":"f538beb5fcfaf33ae4d77f5cb23667cb250ae4ce951346ede7f6e60e7bb249ff"}
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.146420 5003 scope.go:117] "RemoveContainer" containerID="d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.164007 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.170734 5003 scope.go:117] "RemoveContainer" containerID="201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.177284 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:12:16 crc kubenswrapper[5003]: E0104 12:12:16.183810 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8\": container with ID starting with 201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8 not found: ID does not exist" containerID="201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.183862 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8"} err="failed to get container status \"201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8\": rpc error: code = NotFound desc = could not find container \"201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8\": container with ID starting with 201a465779d214e48736e7c55e3a3f2c9b539bd43938ef85dcd2ea6e2dd79ba8 not found: ID does not exist"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.183897 5003 scope.go:117] "RemoveContainer" containerID="d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e"
Jan 04 12:12:16 crc kubenswrapper[5003]: E0104 12:12:16.185701 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e\": container with ID starting with d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e not found: ID does not exist" containerID="d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.185750 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e"} err="failed to get container status \"d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e\": rpc error: code = NotFound desc = could not find container \"d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e\": container with ID starting with d876c470bb5311460d153b107e11b2b7ee80d059a9b8a6c247efe3447855b19e not found: ID does not exist"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.186777 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:12:16 crc kubenswrapper[5003]: E0104 12:12:16.187320 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerName="nova-api-api"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.187347 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerName="nova-api-api"
Jan 04 12:12:16 crc kubenswrapper[5003]: E0104 12:12:16.187389 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerName="nova-api-log"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.187400 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerName="nova-api-log"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.187651 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerName="nova-api-log"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.187685 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" containerName="nova-api-api"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.188889 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.194373 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.194796 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.194934 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.206950 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.286938 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.287353 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkjv5\" (UniqueName: \"kubernetes.io/projected/d672a129-34ab-4b75-9ad7-b9bd82c532da-kube-api-access-tkjv5\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.287486 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.287528 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-public-tls-certs\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.287606 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d672a129-34ab-4b75-9ad7-b9bd82c532da-logs\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.287671 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-config-data\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.390214 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkjv5\" (UniqueName: \"kubernetes.io/projected/d672a129-34ab-4b75-9ad7-b9bd82c532da-kube-api-access-tkjv5\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.390674 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.390693 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-public-tls-certs\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.390721 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d672a129-34ab-4b75-9ad7-b9bd82c532da-logs\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.390753 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-config-data\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.390836 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.391298 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d672a129-34ab-4b75-9ad7-b9bd82c532da-logs\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.396034 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-config-data\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.396065 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.403651 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.406906 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-public-tls-certs\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.413228 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkjv5\" (UniqueName: \"kubernetes.io/projected/d672a129-34ab-4b75-9ad7-b9bd82c532da-kube-api-access-tkjv5\") pod \"nova-api-0\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.551913 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Jan 04 12:12:16 crc kubenswrapper[5003]: I0104 12:12:16.827938 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f6f0f96-f5e0-4941-ae0e-61300c0709f0" path="/var/lib/kubelet/pods/2f6f0f96-f5e0-4941-ae0e-61300c0709f0/volumes"
Jan 04 12:12:17 crc kubenswrapper[5003]: I0104 12:12:17.114950 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:12:17 crc kubenswrapper[5003]: I0104 12:12:17.130107 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42","Type":"ContainerStarted","Data":"e66e7dbec82eeff67f71401a2ea11fe88a1996da2709ae8632f320656877be7d"}
Jan 04 12:12:18 crc kubenswrapper[5003]: I0104 12:12:18.153006 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42","Type":"ContainerStarted","Data":"c595cd857ef9b98c086bda781fccff343323ff20c25f68b8460e16c62cecb388"}
Jan 04 12:12:18 crc kubenswrapper[5003]: I0104 12:12:18.155941 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d672a129-34ab-4b75-9ad7-b9bd82c532da","Type":"ContainerStarted","Data":"85f7ac1a28aa2dd3250d84bab825195f82f517d25555ba79be993484e503432a"}
Jan 04 12:12:18 crc kubenswrapper[5003]: I0104 12:12:18.156051 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d672a129-34ab-4b75-9ad7-b9bd82c532da","Type":"ContainerStarted","Data":"a595bc4dc1cb538aa4b9367357d35bd9b2969ba0b4ad4cf26c5a222879433ef7"}
Jan 04 12:12:18 crc kubenswrapper[5003]: I0104 12:12:18.156078 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d672a129-34ab-4b75-9ad7-b9bd82c532da","Type":"ContainerStarted","Data":"0784f0916a8bcd0b0064769fe32196908778b0dff467bc3650fbc370eb2888ad"}
Jan 04 12:12:18 crc kubenswrapper[5003]: I0104 12:12:18.205329 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.205298856 podStartE2EDuration="2.205298856s" podCreationTimestamp="2026-01-04 12:12:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:12:18.181484204 +0000 UTC m=+1453.654514115" watchObservedRunningTime="2026-01-04 12:12:18.205298856 +0000 UTC m=+1453.678328737"
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.158284 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7"
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.280075 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-wg2m4"]
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.280409 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" podUID="5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" containerName="dnsmasq-dns" containerID="cri-o://b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267" gracePeriod=10
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.827003 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4"
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.900106 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-sb\") pod \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") "
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.900377 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-config\") pod \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") "
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.900416 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-svc\") pod \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") "
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.900439 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-nb\") pod \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") "
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.900535 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-swift-storage-0\") pod \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") "
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.900645 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kv7t9\" (UniqueName: \"kubernetes.io/projected/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-kube-api-access-kv7t9\") pod \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\" (UID: \"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4\") "
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.907837 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-kube-api-access-kv7t9" (OuterVolumeSpecName: "kube-api-access-kv7t9") pod "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" (UID: "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4"). InnerVolumeSpecName "kube-api-access-kv7t9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.958418 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" (UID: "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.963734 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" (UID: "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.965899 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" (UID: "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.975332 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" (UID: "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:12:19 crc kubenswrapper[5003]: I0104 12:12:19.982650 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-config" (OuterVolumeSpecName: "config") pod "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" (UID: "5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.003523 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-config\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.003555 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.003568 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.003580 5003 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.003593 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kv7t9\" (UniqueName: \"kubernetes.io/projected/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-kube-api-access-kv7t9\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.003601 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.239211 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42","Type":"ContainerStarted","Data":"07b2e1874e1e6350f660adafaf47ead2fb632e268672667911eded298b5a3565"}
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.239676 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.241696 5003 generic.go:334] "Generic (PLEG): container finished" podID="5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" containerID="b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267" exitCode=0
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.241768 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" event={"ID":"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4","Type":"ContainerDied","Data":"b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267"}
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.241803 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" event={"ID":"5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4","Type":"ContainerDied","Data":"7343a2b31144245e84fd023eb5b79a90bc235cd6e618a177d31ce4099f41f72c"}
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.241821 5003 scope.go:117] "RemoveContainer" containerID="b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267"
Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.241965 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-647df7b8c5-wg2m4" Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.271644 5003 scope.go:117] "RemoveContainer" containerID="25d3a76041f4786e42243668f2f97d5669cc3b3b9db2ef361de72a3e2ff1b53d" Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.275138 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.676750508 podStartE2EDuration="6.275097802s" podCreationTimestamp="2026-01-04 12:12:14 +0000 UTC" firstStartedPulling="2026-01-04 12:12:15.371453043 +0000 UTC m=+1450.844482874" lastFinishedPulling="2026-01-04 12:12:18.969800317 +0000 UTC m=+1454.442830168" observedRunningTime="2026-01-04 12:12:20.266670132 +0000 UTC m=+1455.739699993" watchObservedRunningTime="2026-01-04 12:12:20.275097802 +0000 UTC m=+1455.748127643" Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.296387 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-wg2m4"] Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.299622 5003 scope.go:117] "RemoveContainer" containerID="b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267" Jan 04 12:12:20 crc kubenswrapper[5003]: E0104 12:12:20.300346 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267\": container with ID starting with b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267 not found: ID does not exist" containerID="b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267" Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.300395 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267"} err="failed to get container status \"b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267\": rpc error: code = NotFound desc = could not find container \"b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267\": container with ID starting with b5c68afcbdf36910913f01f74e14f8f3735e6d961665edf03efbc006b7d28267 not found: ID does not exist" Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.300429 5003 scope.go:117] "RemoveContainer" containerID="25d3a76041f4786e42243668f2f97d5669cc3b3b9db2ef361de72a3e2ff1b53d" Jan 04 12:12:20 crc kubenswrapper[5003]: E0104 12:12:20.301111 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25d3a76041f4786e42243668f2f97d5669cc3b3b9db2ef361de72a3e2ff1b53d\": container with ID starting with 25d3a76041f4786e42243668f2f97d5669cc3b3b9db2ef361de72a3e2ff1b53d not found: ID does not exist" containerID="25d3a76041f4786e42243668f2f97d5669cc3b3b9db2ef361de72a3e2ff1b53d" Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.301178 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25d3a76041f4786e42243668f2f97d5669cc3b3b9db2ef361de72a3e2ff1b53d"} err="failed to get container status \"25d3a76041f4786e42243668f2f97d5669cc3b3b9db2ef361de72a3e2ff1b53d\": rpc error: code = NotFound desc = could not find container \"25d3a76041f4786e42243668f2f97d5669cc3b3b9db2ef361de72a3e2ff1b53d\": container with ID starting with 25d3a76041f4786e42243668f2f97d5669cc3b3b9db2ef361de72a3e2ff1b53d not found: ID does not exist" Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 
12:12:20.305444 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-647df7b8c5-wg2m4"] Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.479135 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.505215 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:20 crc kubenswrapper[5003]: I0104 12:12:20.823634 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" path="/var/lib/kubelet/pods/5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4/volumes" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.286643 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.570814 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-k4dms"] Jan 04 12:12:21 crc kubenswrapper[5003]: E0104 12:12:21.571856 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" containerName="init" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.571884 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" containerName="init" Jan 04 12:12:21 crc kubenswrapper[5003]: E0104 12:12:21.571923 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" containerName="dnsmasq-dns" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.571931 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" containerName="dnsmasq-dns" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.572344 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5593eeb6-dcf9-4cc1-bc7d-55fd6c9b46c4" containerName="dnsmasq-dns" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.573229 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.576956 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.576978 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.586896 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-k4dms"] Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.648034 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.648576 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-config-data\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.648722 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpt4z\" (UniqueName: \"kubernetes.io/projected/f65226b6-f859-4771-9ed2-37808a129af2-kube-api-access-vpt4z\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.648943 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-scripts\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.751149 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-scripts\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.751228 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.751369 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-config-data\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.751416 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpt4z\" (UniqueName: 
\"kubernetes.io/projected/f65226b6-f859-4771-9ed2-37808a129af2-kube-api-access-vpt4z\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.759686 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-scripts\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.759842 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-config-data\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.760802 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.788062 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpt4z\" (UniqueName: \"kubernetes.io/projected/f65226b6-f859-4771-9ed2-37808a129af2-kube-api-access-vpt4z\") pod \"nova-cell1-cell-mapping-k4dms\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:21 crc kubenswrapper[5003]: I0104 12:12:21.891631 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:22 crc kubenswrapper[5003]: I0104 12:12:22.551840 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-k4dms"] Jan 04 12:12:23 crc kubenswrapper[5003]: I0104 12:12:23.304720 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-k4dms" event={"ID":"f65226b6-f859-4771-9ed2-37808a129af2","Type":"ContainerStarted","Data":"e83484bf8f6d67f08352ddbaa45d3a145e808fc097c9292bc7185dc5bfcfc109"} Jan 04 12:12:23 crc kubenswrapper[5003]: I0104 12:12:23.304798 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-k4dms" event={"ID":"f65226b6-f859-4771-9ed2-37808a129af2","Type":"ContainerStarted","Data":"d826e3fc4dd0c1757ba218f28c39794bc4fe4c3c831f36275901910d7c2b310e"} Jan 04 12:12:23 crc kubenswrapper[5003]: I0104 12:12:23.343516 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-k4dms" podStartSLOduration=2.343484485 podStartE2EDuration="2.343484485s" podCreationTimestamp="2026-01-04 12:12:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:12:23.325851784 +0000 UTC m=+1458.798881625" watchObservedRunningTime="2026-01-04 12:12:23.343484485 +0000 UTC m=+1458.816514336" Jan 04 12:12:26 crc kubenswrapper[5003]: I0104 12:12:26.553317 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:12:26 crc kubenswrapper[5003]: I0104 12:12:26.553734 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:12:27 crc kubenswrapper[5003]: I0104 12:12:27.569285 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 04 12:12:27 crc kubenswrapper[5003]: I0104 12:12:27.569717 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.203:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 04 12:12:28 crc kubenswrapper[5003]: I0104 12:12:28.387515 5003 generic.go:334] "Generic (PLEG): container finished" podID="f65226b6-f859-4771-9ed2-37808a129af2" containerID="e83484bf8f6d67f08352ddbaa45d3a145e808fc097c9292bc7185dc5bfcfc109" exitCode=0 Jan 04 12:12:28 crc kubenswrapper[5003]: I0104 12:12:28.387741 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-k4dms" event={"ID":"f65226b6-f859-4771-9ed2-37808a129af2","Type":"ContainerDied","Data":"e83484bf8f6d67f08352ddbaa45d3a145e808fc097c9292bc7185dc5bfcfc109"} Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.846761 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.877551 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpt4z\" (UniqueName: \"kubernetes.io/projected/f65226b6-f859-4771-9ed2-37808a129af2-kube-api-access-vpt4z\") pod \"f65226b6-f859-4771-9ed2-37808a129af2\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.877664 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-config-data\") pod \"f65226b6-f859-4771-9ed2-37808a129af2\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.877947 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-combined-ca-bundle\") pod \"f65226b6-f859-4771-9ed2-37808a129af2\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.878079 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-scripts\") pod \"f65226b6-f859-4771-9ed2-37808a129af2\" (UID: \"f65226b6-f859-4771-9ed2-37808a129af2\") " Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.901224 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-scripts" (OuterVolumeSpecName: "scripts") pod "f65226b6-f859-4771-9ed2-37808a129af2" (UID: "f65226b6-f859-4771-9ed2-37808a129af2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.901374 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f65226b6-f859-4771-9ed2-37808a129af2-kube-api-access-vpt4z" (OuterVolumeSpecName: "kube-api-access-vpt4z") pod "f65226b6-f859-4771-9ed2-37808a129af2" (UID: "f65226b6-f859-4771-9ed2-37808a129af2"). InnerVolumeSpecName "kube-api-access-vpt4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.908266 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f65226b6-f859-4771-9ed2-37808a129af2" (UID: "f65226b6-f859-4771-9ed2-37808a129af2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.919117 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-config-data" (OuterVolumeSpecName: "config-data") pod "f65226b6-f859-4771-9ed2-37808a129af2" (UID: "f65226b6-f859-4771-9ed2-37808a129af2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.980286 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.980595 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.980675 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpt4z\" (UniqueName: \"kubernetes.io/projected/f65226b6-f859-4771-9ed2-37808a129af2-kube-api-access-vpt4z\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:29 crc kubenswrapper[5003]: I0104 12:12:29.980747 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f65226b6-f859-4771-9ed2-37808a129af2-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:30 crc kubenswrapper[5003]: I0104 12:12:30.414358 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-k4dms" event={"ID":"f65226b6-f859-4771-9ed2-37808a129af2","Type":"ContainerDied","Data":"d826e3fc4dd0c1757ba218f28c39794bc4fe4c3c831f36275901910d7c2b310e"} Jan 04 12:12:30 crc kubenswrapper[5003]: I0104 12:12:30.414750 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d826e3fc4dd0c1757ba218f28c39794bc4fe4c3c831f36275901910d7c2b310e" Jan 04 12:12:30 crc kubenswrapper[5003]: I0104 12:12:30.414470 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-k4dms" Jan 04 12:12:30 crc kubenswrapper[5003]: I0104 12:12:30.603564 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:12:30 crc kubenswrapper[5003]: I0104 12:12:30.603877 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerName="nova-api-log" containerID="cri-o://a595bc4dc1cb538aa4b9367357d35bd9b2969ba0b4ad4cf26c5a222879433ef7" gracePeriod=30 Jan 04 12:12:30 crc kubenswrapper[5003]: I0104 12:12:30.604043 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerName="nova-api-api" containerID="cri-o://85f7ac1a28aa2dd3250d84bab825195f82f517d25555ba79be993484e503432a" gracePeriod=30 Jan 04 12:12:30 crc kubenswrapper[5003]: I0104 12:12:30.612958 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:12:30 crc kubenswrapper[5003]: I0104 12:12:30.613204 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="37d57f7a-9e52-4f46-978b-20ba81ad8b22" containerName="nova-scheduler-scheduler" containerID="cri-o://8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b" gracePeriod=30 Jan 04 12:12:30 crc kubenswrapper[5003]: I0104 12:12:30.647288 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:12:30 crc kubenswrapper[5003]: I0104 12:12:30.647597 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" 
containerName="nova-metadata-log" containerID="cri-o://6591fb08d275b9ad5bb9d8eaf8fa974fea5226307b28d5b2749f852c425ee738" gracePeriod=30 Jan 04 12:12:30 crc kubenswrapper[5003]: I0104 12:12:30.647771 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" containerName="nova-metadata-metadata" containerID="cri-o://5f0ff55a5228bd2c7cce6f8b494982689ecaf0845f8cc4b99c6855bbd52077ae" gracePeriod=30 Jan 04 12:12:31 crc kubenswrapper[5003]: I0104 12:12:31.425282 5003 generic.go:334] "Generic (PLEG): container finished" podID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" containerID="6591fb08d275b9ad5bb9d8eaf8fa974fea5226307b28d5b2749f852c425ee738" exitCode=143 Jan 04 12:12:31 crc kubenswrapper[5003]: I0104 12:12:31.425379 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"318a10f9-0b3b-46c4-a3d7-978d795b67e4","Type":"ContainerDied","Data":"6591fb08d275b9ad5bb9d8eaf8fa974fea5226307b28d5b2749f852c425ee738"} Jan 04 12:12:31 crc kubenswrapper[5003]: I0104 12:12:31.428424 5003 generic.go:334] "Generic (PLEG): container finished" podID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerID="a595bc4dc1cb538aa4b9367357d35bd9b2969ba0b4ad4cf26c5a222879433ef7" exitCode=143 Jan 04 12:12:31 crc kubenswrapper[5003]: I0104 12:12:31.428450 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d672a129-34ab-4b75-9ad7-b9bd82c532da","Type":"ContainerDied","Data":"a595bc4dc1cb538aa4b9367357d35bd9b2969ba0b4ad4cf26c5a222879433ef7"} Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.278218 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.351167 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t48kr\" (UniqueName: \"kubernetes.io/projected/37d57f7a-9e52-4f46-978b-20ba81ad8b22-kube-api-access-t48kr\") pod \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.351269 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-combined-ca-bundle\") pod \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.351513 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-config-data\") pod \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\" (UID: \"37d57f7a-9e52-4f46-978b-20ba81ad8b22\") " Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.359738 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37d57f7a-9e52-4f46-978b-20ba81ad8b22-kube-api-access-t48kr" (OuterVolumeSpecName: "kube-api-access-t48kr") pod "37d57f7a-9e52-4f46-978b-20ba81ad8b22" (UID: "37d57f7a-9e52-4f46-978b-20ba81ad8b22"). InnerVolumeSpecName "kube-api-access-t48kr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.383484 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37d57f7a-9e52-4f46-978b-20ba81ad8b22" (UID: "37d57f7a-9e52-4f46-978b-20ba81ad8b22"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.392254 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-config-data" (OuterVolumeSpecName: "config-data") pod "37d57f7a-9e52-4f46-978b-20ba81ad8b22" (UID: "37d57f7a-9e52-4f46-978b-20ba81ad8b22"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.448586 5003 generic.go:334] "Generic (PLEG): container finished" podID="37d57f7a-9e52-4f46-978b-20ba81ad8b22" containerID="8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b" exitCode=0 Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.448640 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"37d57f7a-9e52-4f46-978b-20ba81ad8b22","Type":"ContainerDied","Data":"8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b"} Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.448668 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"37d57f7a-9e52-4f46-978b-20ba81ad8b22","Type":"ContainerDied","Data":"35d7f6320bfed1bb90d211e94f67782b848a2ecedc694cc010a9f2fb974e6279"} Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.448685 5003 scope.go:117] "RemoveContainer" containerID="8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.448693 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.453921 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.453947 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37d57f7a-9e52-4f46-978b-20ba81ad8b22-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.453958 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t48kr\" (UniqueName: \"kubernetes.io/projected/37d57f7a-9e52-4f46-978b-20ba81ad8b22-kube-api-access-t48kr\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.483741 5003 scope.go:117] "RemoveContainer" containerID="8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b" Jan 04 12:12:33 crc kubenswrapper[5003]: E0104 12:12:33.484310 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b\": container with ID starting with 8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b not found: ID does not exist" containerID="8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.484359 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b"} err="failed to get container status \"8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b\": rpc error: code = NotFound desc = could not find container \"8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b\": container with ID starting with 8021a98cb925a34cef7ba24c638cdc7931569a63a525db07fb10719840c4cf6b not found: ID does not exist" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.491461 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.505532 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.517885 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:12:33 crc kubenswrapper[5003]: E0104 12:12:33.518422 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37d57f7a-9e52-4f46-978b-20ba81ad8b22" containerName="nova-scheduler-scheduler" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.518446 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="37d57f7a-9e52-4f46-978b-20ba81ad8b22" containerName="nova-scheduler-scheduler" Jan 04 12:12:33 crc kubenswrapper[5003]: E0104 12:12:33.518465 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f65226b6-f859-4771-9ed2-37808a129af2" containerName="nova-manage" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.518472 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f65226b6-f859-4771-9ed2-37808a129af2" containerName="nova-manage" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.518706 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="37d57f7a-9e52-4f46-978b-20ba81ad8b22" 
containerName="nova-scheduler-scheduler" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.518732 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f65226b6-f859-4771-9ed2-37808a129af2" containerName="nova-manage" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.519435 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.524548 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.527816 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.555519 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-config-data\") pod \"nova-scheduler-0\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.555835 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js99j\" (UniqueName: \"kubernetes.io/projected/295f88c8-79ac-463f-85e3-d98dc15dd06f-kube-api-access-js99j\") pod \"nova-scheduler-0\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.555983 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.657315 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js99j\" (UniqueName: \"kubernetes.io/projected/295f88c8-79ac-463f-85e3-d98dc15dd06f-kube-api-access-js99j\") pod \"nova-scheduler-0\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.657430 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.657511 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-config-data\") pod \"nova-scheduler-0\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.661813 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.662776 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-config-data\") pod \"nova-scheduler-0\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.675536 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js99j\" (UniqueName: \"kubernetes.io/projected/295f88c8-79ac-463f-85e3-d98dc15dd06f-kube-api-access-js99j\") pod \"nova-scheduler-0\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " pod="openstack/nova-scheduler-0" Jan 04 12:12:33 crc kubenswrapper[5003]: I0104 12:12:33.843375 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.352924 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.463173 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"295f88c8-79ac-463f-85e3-d98dc15dd06f","Type":"ContainerStarted","Data":"7ad93d37b4c75a2cf1e65358a55a2474bd3e083e077c4e4acb7eb1bd2fb602ab"} Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.465727 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d672a129-34ab-4b75-9ad7-b9bd82c532da","Type":"ContainerDied","Data":"85f7ac1a28aa2dd3250d84bab825195f82f517d25555ba79be993484e503432a"} Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.466220 5003 generic.go:334] "Generic (PLEG): container finished" podID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerID="85f7ac1a28aa2dd3250d84bab825195f82f517d25555ba79be993484e503432a" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.468434 5003 generic.go:334] "Generic (PLEG): container finished" podID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" containerID="5f0ff55a5228bd2c7cce6f8b494982689ecaf0845f8cc4b99c6855bbd52077ae" exitCode=0 Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.468459 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"318a10f9-0b3b-46c4-a3d7-978d795b67e4","Type":"ContainerDied","Data":"5f0ff55a5228bd2c7cce6f8b494982689ecaf0845f8cc4b99c6855bbd52077ae"} Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.468489 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"318a10f9-0b3b-46c4-a3d7-978d795b67e4","Type":"ContainerDied","Data":"4f0487bf6ef5efb956b64eb21c34478bfcbe3fa6710ad2dbe057261dfe986565"} Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.468506 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f0487bf6ef5efb956b64eb21c34478bfcbe3fa6710ad2dbe057261dfe986565" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.478263 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.489780 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.575784 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5c9hq\" (UniqueName: \"kubernetes.io/projected/318a10f9-0b3b-46c4-a3d7-978d795b67e4-kube-api-access-5c9hq\") pod \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.576236 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-nova-metadata-tls-certs\") pod \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.576275 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-internal-tls-certs\") pod \"d672a129-34ab-4b75-9ad7-b9bd82c532da\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.576302 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d672a129-34ab-4b75-9ad7-b9bd82c532da-logs\") pod \"d672a129-34ab-4b75-9ad7-b9bd82c532da\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.576405 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkjv5\" (UniqueName: \"kubernetes.io/projected/d672a129-34ab-4b75-9ad7-b9bd82c532da-kube-api-access-tkjv5\") pod \"d672a129-34ab-4b75-9ad7-b9bd82c532da\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.576441 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-combined-ca-bundle\") pod \"d672a129-34ab-4b75-9ad7-b9bd82c532da\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.576492 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-public-tls-certs\") pod \"d672a129-34ab-4b75-9ad7-b9bd82c532da\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.576535 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-config-data\") pod \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.576576 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-config-data\") pod \"d672a129-34ab-4b75-9ad7-b9bd82c532da\" (UID: \"d672a129-34ab-4b75-9ad7-b9bd82c532da\") " Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.576645 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/318a10f9-0b3b-46c4-a3d7-978d795b67e4-logs\") pod \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\" (UID: 
\"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.576668 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-combined-ca-bundle\") pod \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\" (UID: \"318a10f9-0b3b-46c4-a3d7-978d795b67e4\") " Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.581171 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d672a129-34ab-4b75-9ad7-b9bd82c532da-logs" (OuterVolumeSpecName: "logs") pod "d672a129-34ab-4b75-9ad7-b9bd82c532da" (UID: "d672a129-34ab-4b75-9ad7-b9bd82c532da"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.583252 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/318a10f9-0b3b-46c4-a3d7-978d795b67e4-kube-api-access-5c9hq" (OuterVolumeSpecName: "kube-api-access-5c9hq") pod "318a10f9-0b3b-46c4-a3d7-978d795b67e4" (UID: "318a10f9-0b3b-46c4-a3d7-978d795b67e4"). InnerVolumeSpecName "kube-api-access-5c9hq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.583708 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/318a10f9-0b3b-46c4-a3d7-978d795b67e4-logs" (OuterVolumeSpecName: "logs") pod "318a10f9-0b3b-46c4-a3d7-978d795b67e4" (UID: "318a10f9-0b3b-46c4-a3d7-978d795b67e4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.597326 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d672a129-34ab-4b75-9ad7-b9bd82c532da-kube-api-access-tkjv5" (OuterVolumeSpecName: "kube-api-access-tkjv5") pod "d672a129-34ab-4b75-9ad7-b9bd82c532da" (UID: "d672a129-34ab-4b75-9ad7-b9bd82c532da"). InnerVolumeSpecName "kube-api-access-tkjv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.616280 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "318a10f9-0b3b-46c4-a3d7-978d795b67e4" (UID: "318a10f9-0b3b-46c4-a3d7-978d795b67e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.617283 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d672a129-34ab-4b75-9ad7-b9bd82c532da" (UID: "d672a129-34ab-4b75-9ad7-b9bd82c532da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.623687 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-config-data" (OuterVolumeSpecName: "config-data") pod "d672a129-34ab-4b75-9ad7-b9bd82c532da" (UID: "d672a129-34ab-4b75-9ad7-b9bd82c532da"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.625282 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-config-data" (OuterVolumeSpecName: "config-data") pod "318a10f9-0b3b-46c4-a3d7-978d795b67e4" (UID: "318a10f9-0b3b-46c4-a3d7-978d795b67e4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.650917 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d672a129-34ab-4b75-9ad7-b9bd82c532da" (UID: "d672a129-34ab-4b75-9ad7-b9bd82c532da"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.664483 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d672a129-34ab-4b75-9ad7-b9bd82c532da" (UID: "d672a129-34ab-4b75-9ad7-b9bd82c532da"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.664847 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "318a10f9-0b3b-46c4-a3d7-978d795b67e4" (UID: "318a10f9-0b3b-46c4-a3d7-978d795b67e4"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.679354 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5c9hq\" (UniqueName: \"kubernetes.io/projected/318a10f9-0b3b-46c4-a3d7-978d795b67e4-kube-api-access-5c9hq\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.679394 5003 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.679409 5003 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.679422 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d672a129-34ab-4b75-9ad7-b9bd82c532da-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.679435 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkjv5\" (UniqueName: \"kubernetes.io/projected/d672a129-34ab-4b75-9ad7-b9bd82c532da-kube-api-access-tkjv5\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.679446 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.679457 5003 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.679468 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.679479 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d672a129-34ab-4b75-9ad7-b9bd82c532da-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.679489 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/318a10f9-0b3b-46c4-a3d7-978d795b67e4-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.679499 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/318a10f9-0b3b-46c4-a3d7-978d795b67e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:12:34 crc kubenswrapper[5003]: I0104 12:12:34.817742 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37d57f7a-9e52-4f46-978b-20ba81ad8b22" path="/var/lib/kubelet/pods/37d57f7a-9e52-4f46-978b-20ba81ad8b22/volumes" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.479449 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.479448 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d672a129-34ab-4b75-9ad7-b9bd82c532da","Type":"ContainerDied","Data":"0784f0916a8bcd0b0064769fe32196908778b0dff467bc3650fbc370eb2888ad"} Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.479576 5003 scope.go:117] "RemoveContainer" containerID="85f7ac1a28aa2dd3250d84bab825195f82f517d25555ba79be993484e503432a" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.483233 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.483835 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"295f88c8-79ac-463f-85e3-d98dc15dd06f","Type":"ContainerStarted","Data":"c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd"} Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.510714 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.512269 5003 scope.go:117] "RemoveContainer" containerID="a595bc4dc1cb538aa4b9367357d35bd9b2969ba0b4ad4cf26c5a222879433ef7" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.549322 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.590382 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Jan 04 12:12:35 crc kubenswrapper[5003]: E0104 12:12:35.591493 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" containerName="nova-metadata-log" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.591530 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" containerName="nova-metadata-log" Jan 04 12:12:35 crc kubenswrapper[5003]: E0104 12:12:35.591561 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerName="nova-api-api" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.591573 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerName="nova-api-api" Jan 04 12:12:35 crc kubenswrapper[5003]: E0104 12:12:35.591605 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" containerName="nova-metadata-metadata" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.591614 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" containerName="nova-metadata-metadata" Jan 04 12:12:35 crc kubenswrapper[5003]: E0104 12:12:35.591640 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerName="nova-api-log" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.591651 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerName="nova-api-log" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.592107 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" containerName="nova-metadata-log" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.592130 5003 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerName="nova-api-api" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.592153 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" containerName="nova-metadata-metadata" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.592189 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d672a129-34ab-4b75-9ad7-b9bd82c532da" containerName="nova-api-log" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.598516 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.599736 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.599714236 podStartE2EDuration="2.599714236s" podCreationTimestamp="2026-01-04 12:12:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:12:35.535926769 +0000 UTC m=+1471.008956630" watchObservedRunningTime="2026-01-04 12:12:35.599714236 +0000 UTC m=+1471.072744077" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.601172 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.601942 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.602231 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.636140 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.669113 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.678123 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.689569 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.692807 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.695896 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.696842 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.698270 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.698323 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-logs\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.698349 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-config-data\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.698377 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgzj8\" (UniqueName: \"kubernetes.io/projected/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-kube-api-access-jgzj8\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.698426 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.698449 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.698470 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r2wk\" (UniqueName: \"kubernetes.io/projected/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-kube-api-access-8r2wk\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.698504 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-logs\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.698520 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-config-data\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.698536 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.698725 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.714507 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.801200 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.801296 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.801371 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-logs\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.801422 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-config-data\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.801469 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgzj8\" (UniqueName: \"kubernetes.io/projected/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-kube-api-access-jgzj8\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.801565 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.801608 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.801684 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r2wk\" (UniqueName: \"kubernetes.io/projected/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-kube-api-access-8r2wk\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.801748 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-logs\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.801778 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-config-data\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.801803 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.802062 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-logs\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.802765 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-logs\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.807907 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.808394 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-public-tls-certs\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.808633 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-config-data\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.809244 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-config-data\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.809240 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.816586 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgzj8\" (UniqueName: \"kubernetes.io/projected/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-kube-api-access-jgzj8\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.821116 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.821153 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " pod="openstack/nova-api-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.823704 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r2wk\" (UniqueName: \"kubernetes.io/projected/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-kube-api-access-8r2wk\") pod \"nova-metadata-0\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " pod="openstack/nova-metadata-0" Jan 04 12:12:35 crc kubenswrapper[5003]: I0104 12:12:35.952864 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:12:36 crc kubenswrapper[5003]: I0104 12:12:36.018245 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:12:36 crc kubenswrapper[5003]: I0104 12:12:36.446745 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:12:36 crc kubenswrapper[5003]: W0104 12:12:36.451263 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0afbe6e8_b5bd_4cc7_80df_06cbc395e4bc.slice/crio-3d3e15b330c64221f5822e535c747346dcf75ad8ef1ec7f171c687c7c7388cf9 WatchSource:0}: Error finding container 3d3e15b330c64221f5822e535c747346dcf75ad8ef1ec7f171c687c7c7388cf9: Status 404 returned error can't find the container with id 3d3e15b330c64221f5822e535c747346dcf75ad8ef1ec7f171c687c7c7388cf9 Jan 04 12:12:36 crc kubenswrapper[5003]: I0104 12:12:36.507448 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc","Type":"ContainerStarted","Data":"3d3e15b330c64221f5822e535c747346dcf75ad8ef1ec7f171c687c7c7388cf9"} Jan 04 12:12:36 crc kubenswrapper[5003]: I0104 12:12:36.528239 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:12:36 crc kubenswrapper[5003]: W0104 12:12:36.531418 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4c84d72_3209_4925_9eb2_cbebcd1e8ae7.slice/crio-339fc6a2c3d87ae14805161c26cba53699a18a3387d7641a5b00e92740038d5e WatchSource:0}: Error finding container 339fc6a2c3d87ae14805161c26cba53699a18a3387d7641a5b00e92740038d5e: Status 404 returned error can't find the container with id 339fc6a2c3d87ae14805161c26cba53699a18a3387d7641a5b00e92740038d5e Jan 04 12:12:36 crc kubenswrapper[5003]: I0104 12:12:36.817588 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="318a10f9-0b3b-46c4-a3d7-978d795b67e4" path="/var/lib/kubelet/pods/318a10f9-0b3b-46c4-a3d7-978d795b67e4/volumes" Jan 04 12:12:36 crc kubenswrapper[5003]: I0104 12:12:36.818876 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d672a129-34ab-4b75-9ad7-b9bd82c532da" path="/var/lib/kubelet/pods/d672a129-34ab-4b75-9ad7-b9bd82c532da/volumes" Jan 04 12:12:37 crc kubenswrapper[5003]: I0104 12:12:37.536764 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc","Type":"ContainerStarted","Data":"0251462325e49cea189f787676ef9b788f1da3cfefece6459ad475f7a74cd5c1"} Jan 04 12:12:37 crc kubenswrapper[5003]: I0104 12:12:37.536824 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc","Type":"ContainerStarted","Data":"a36128483858c51bdf228f1c485a058ce56e3cb51fe588687795efca501f2855"} Jan 04 12:12:37 crc kubenswrapper[5003]: I0104 12:12:37.546365 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7","Type":"ContainerStarted","Data":"88cf151cf804256f12f3d9496a2faf5cc39d58a9955cca558c56b93f8e9f6281"} Jan 04 12:12:37 crc kubenswrapper[5003]: I0104 12:12:37.546425 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7","Type":"ContainerStarted","Data":"a03dc47ea5678d35499c7d2c162e59903757ffa22b0c3abb3b985027ba48121d"} Jan 04 12:12:37 crc kubenswrapper[5003]: I0104 12:12:37.546439 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-metadata-0" event={"ID":"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7","Type":"ContainerStarted","Data":"339fc6a2c3d87ae14805161c26cba53699a18a3387d7641a5b00e92740038d5e"} Jan 04 12:12:37 crc kubenswrapper[5003]: I0104 12:12:37.581593 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.5815684819999998 podStartE2EDuration="2.581568482s" podCreationTimestamp="2026-01-04 12:12:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:12:37.566358045 +0000 UTC m=+1473.039387926" watchObservedRunningTime="2026-01-04 12:12:37.581568482 +0000 UTC m=+1473.054598333" Jan 04 12:12:37 crc kubenswrapper[5003]: I0104 12:12:37.591285 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.591216494 podStartE2EDuration="2.591216494s" podCreationTimestamp="2026-01-04 12:12:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:12:37.589181091 +0000 UTC m=+1473.062210972" watchObservedRunningTime="2026-01-04 12:12:37.591216494 +0000 UTC m=+1473.064246335" Jan 04 12:12:38 crc kubenswrapper[5003]: I0104 12:12:38.844343 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Jan 04 12:12:41 crc kubenswrapper[5003]: I0104 12:12:41.018571 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 04 12:12:41 crc kubenswrapper[5003]: I0104 12:12:41.019463 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Jan 04 12:12:43 crc kubenswrapper[5003]: I0104 12:12:43.843657 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Jan 04 12:12:43 crc kubenswrapper[5003]: I0104 12:12:43.872617 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Jan 04 12:12:44 crc kubenswrapper[5003]: I0104 12:12:44.683953 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Jan 04 12:12:44 crc kubenswrapper[5003]: I0104 12:12:44.854605 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Jan 04 12:12:45 crc kubenswrapper[5003]: I0104 12:12:45.953405 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:12:45 crc kubenswrapper[5003]: I0104 12:12:45.953851 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Jan 04 12:12:46 crc kubenswrapper[5003]: I0104 12:12:46.018558 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 04 12:12:46 crc kubenswrapper[5003]: I0104 12:12:46.018602 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Jan 04 12:12:46 crc kubenswrapper[5003]: I0104 12:12:46.968264 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 04 12:12:46 crc kubenswrapper[5003]: I0104 
12:12:46.969243 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.206:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 04 12:12:47 crc kubenswrapper[5003]: I0104 12:12:47.039156 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 04 12:12:47 crc kubenswrapper[5003]: I0104 12:12:47.039527 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 04 12:12:55 crc kubenswrapper[5003]: I0104 12:12:55.966956 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 04 12:12:55 crc kubenswrapper[5003]: I0104 12:12:55.968762 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 04 12:12:55 crc kubenswrapper[5003]: I0104 12:12:55.975479 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Jan 04 12:12:55 crc kubenswrapper[5003]: I0104 12:12:55.977189 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 04 12:12:56 crc kubenswrapper[5003]: I0104 12:12:56.029543 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 04 12:12:56 crc kubenswrapper[5003]: I0104 12:12:56.031293 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Jan 04 12:12:56 crc kubenswrapper[5003]: I0104 12:12:56.034976 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 04 12:12:56 crc kubenswrapper[5003]: I0104 12:12:56.784648 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Jan 04 12:12:56 crc kubenswrapper[5003]: I0104 12:12:56.793411 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Jan 04 12:12:56 crc kubenswrapper[5003]: I0104 12:12:56.795491 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Jan 04 12:13:09 crc kubenswrapper[5003]: I0104 12:13:09.418761 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:13:09 crc kubenswrapper[5003]: I0104 12:13:09.419632 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.448624 5003 kubelet.go:2421] 
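
The probe failures above record HTTPS GETs that timed out ("net/http: request canceled (Client.Timeout exceeded while awaiting headers)") while nova-api and nova-metadata were still starting, then flipped to "started"/"ready" once the services answered. A minimal Go sketch of that class of check follows; it is an illustrative stand-in, not kubelet's prober, and the endpoint, timeout, and the skipped certificate verification are assumptions taken from the entries above.

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

// probe issues a single GET with a short client timeout, the way an HTTPS
// startup/readiness probe does. While the service is still initializing, the
// request fails with the "Client.Timeout exceeded while awaiting headers"
// error class recorded in the log above.
func probe(url string, timeout time.Duration) error {
	client := &http.Client{
		Timeout: timeout,
		Transport: &http.Transport{
			// Assumed for this sketch: the probe does not verify the
			// pod's serving certificate.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy: %s", resp.Status)
	}
	return nil
}

func main() {
	// 10.217.0.206:8774 is the nova-api pod address seen in the entries above.
	fmt.Println(probe("https://10.217.0.206:8774/", time.Second))
}
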
"SyncLoop ADD" source="api" pods=["openstack/neutron-daa1-account-create-update-m44v2"] Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.450518 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-daa1-account-create-update-m44v2" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.506104 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.528120 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-kn44b"] Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.538277 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-kn44b" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.558930 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.559258 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-daa1-account-create-update-m44v2"] Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.579527 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-kn44b"] Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.609218 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmxdk\" (UniqueName: \"kubernetes.io/projected/fba35580-43f2-4d96-8c52-6da2b5fdbd94-kube-api-access-nmxdk\") pod \"neutron-daa1-account-create-update-m44v2\" (UID: \"fba35580-43f2-4d96-8c52-6da2b5fdbd94\") " pod="openstack/neutron-daa1-account-create-update-m44v2" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.609347 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts\") pod \"root-account-create-update-kn44b\" (UID: \"9f2d1a12-b4ce-48c2-88b0-5ae881760963\") " pod="openstack/root-account-create-update-kn44b" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.609598 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fba35580-43f2-4d96-8c52-6da2b5fdbd94-operator-scripts\") pod \"neutron-daa1-account-create-update-m44v2\" (UID: \"fba35580-43f2-4d96-8c52-6da2b5fdbd94\") " pod="openstack/neutron-daa1-account-create-update-m44v2" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.609628 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22bgs\" (UniqueName: \"kubernetes.io/projected/9f2d1a12-b4ce-48c2-88b0-5ae881760963-kube-api-access-22bgs\") pod \"root-account-create-update-kn44b\" (UID: \"9f2d1a12-b4ce-48c2-88b0-5ae881760963\") " pod="openstack/root-account-create-update-kn44b" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.614100 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.614381 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="627b5a59-3da1-4130-92b1-94fcfea8efd4" containerName="openstackclient" containerID="cri-o://72b3f2ee266e67b1cc81df1e410f3bd7f96dcf54ecba5f2d66edb9857899172e" gracePeriod=2 
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.631085 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.714849 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts\") pod \"root-account-create-update-kn44b\" (UID: \"9f2d1a12-b4ce-48c2-88b0-5ae881760963\") " pod="openstack/root-account-create-update-kn44b" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.714956 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fba35580-43f2-4d96-8c52-6da2b5fdbd94-operator-scripts\") pod \"neutron-daa1-account-create-update-m44v2\" (UID: \"fba35580-43f2-4d96-8c52-6da2b5fdbd94\") " pod="openstack/neutron-daa1-account-create-update-m44v2" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.714986 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22bgs\" (UniqueName: \"kubernetes.io/projected/9f2d1a12-b4ce-48c2-88b0-5ae881760963-kube-api-access-22bgs\") pod \"root-account-create-update-kn44b\" (UID: \"9f2d1a12-b4ce-48c2-88b0-5ae881760963\") " pod="openstack/root-account-create-update-kn44b" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.715042 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmxdk\" (UniqueName: \"kubernetes.io/projected/fba35580-43f2-4d96-8c52-6da2b5fdbd94-kube-api-access-nmxdk\") pod \"neutron-daa1-account-create-update-m44v2\" (UID: \"fba35580-43f2-4d96-8c52-6da2b5fdbd94\") " pod="openstack/neutron-daa1-account-create-update-m44v2" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.716075 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts\") pod \"root-account-create-update-kn44b\" (UID: \"9f2d1a12-b4ce-48c2-88b0-5ae881760963\") " pod="openstack/root-account-create-update-kn44b" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.716619 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fba35580-43f2-4d96-8c52-6da2b5fdbd94-operator-scripts\") pod \"neutron-daa1-account-create-update-m44v2\" (UID: \"fba35580-43f2-4d96-8c52-6da2b5fdbd94\") " pod="openstack/neutron-daa1-account-create-update-m44v2" Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.739696 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-daa1-account-create-update-gftf9"] Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.757817 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-daa1-account-create-update-gftf9"] Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.793079 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-553f-account-create-update-kjhcr"] Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.793898 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmxdk\" (UniqueName: \"kubernetes.io/projected/fba35580-43f2-4d96-8c52-6da2b5fdbd94-kube-api-access-nmxdk\") pod \"neutron-daa1-account-create-update-m44v2\" (UID: \"fba35580-43f2-4d96-8c52-6da2b5fdbd94\") " pod="openstack/neutron-daa1-account-create-update-m44v2" 
Jan 04 12:13:18 crc kubenswrapper[5003]: E0104 12:13:18.794859 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="627b5a59-3da1-4130-92b1-94fcfea8efd4" containerName="openstackclient"
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.794881 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="627b5a59-3da1-4130-92b1-94fcfea8efd4" containerName="openstackclient"
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.795103 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="627b5a59-3da1-4130-92b1-94fcfea8efd4" containerName="openstackclient"
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.795825 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-553f-account-create-update-kjhcr"
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.806784 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22bgs\" (UniqueName: \"kubernetes.io/projected/9f2d1a12-b4ce-48c2-88b0-5ae881760963-kube-api-access-22bgs\") pod \"root-account-create-update-kn44b\" (UID: \"9f2d1a12-b4ce-48c2-88b0-5ae881760963\") " pod="openstack/root-account-create-update-kn44b"
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.806976 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.833141 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-daa1-account-create-update-m44v2"
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.867191 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9" path="/var/lib/kubelet/pods/7e9febc9-ba14-4fdd-be87-7bc2cdaa60c9/volumes"
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.867817 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-553f-account-create-update-kjhcr"]
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.867841 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-trt8p"]
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.871943 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-trt8p"]
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.880757 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.880931 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-kn44b"
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.919221 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28114f85-1d9b-4e71-be5f-d721f06c70dc-operator-scripts\") pod \"barbican-553f-account-create-update-kjhcr\" (UID: \"28114f85-1d9b-4e71-be5f-d721f06c70dc\") " pod="openstack/barbican-553f-account-create-update-kjhcr"
Jan 04 12:13:18 crc kubenswrapper[5003]: I0104 12:13:18.919309 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwfgh\" (UniqueName: \"kubernetes.io/projected/28114f85-1d9b-4e71-be5f-d721f06c70dc-kube-api-access-pwfgh\") pod \"barbican-553f-account-create-update-kjhcr\" (UID: \"28114f85-1d9b-4e71-be5f-d721f06c70dc\") " pod="openstack/barbican-553f-account-create-update-kjhcr"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.019379 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-70b4-account-create-update-gwtlx"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.022437 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwfgh\" (UniqueName: \"kubernetes.io/projected/28114f85-1d9b-4e71-be5f-d721f06c70dc-kube-api-access-pwfgh\") pod \"barbican-553f-account-create-update-kjhcr\" (UID: \"28114f85-1d9b-4e71-be5f-d721f06c70dc\") " pod="openstack/barbican-553f-account-create-update-kjhcr"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.022587 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28114f85-1d9b-4e71-be5f-d721f06c70dc-operator-scripts\") pod \"barbican-553f-account-create-update-kjhcr\" (UID: \"28114f85-1d9b-4e71-be5f-d721f06c70dc\") " pod="openstack/barbican-553f-account-create-update-kjhcr"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.023248 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28114f85-1d9b-4e71-be5f-d721f06c70dc-operator-scripts\") pod \"barbican-553f-account-create-update-kjhcr\" (UID: \"28114f85-1d9b-4e71-be5f-d721f06c70dc\") " pod="openstack/barbican-553f-account-create-update-kjhcr"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.025541 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-70b4-account-create-update-gwtlx"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.051007 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-553f-account-create-update-mhxbb"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.074970 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-70b4-account-create-update-gwtlx"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.076425 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.096275 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-553f-account-create-update-mhxbb"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.126564 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.126873 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerName="ovn-northd" containerID="cri-o://3e03c9994a07fa7e18a42bd5c17e591979717aa51ecb5df6f1ce553e5a854fff" gracePeriod=30
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.127210 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerName="openstack-network-exporter" containerID="cri-o://455bff1469c5fdbae60e33d2f9fdcc36a7531d6f5b1512eba0641e8420891546" gracePeriod=30
Jan 04 12:13:19 crc kubenswrapper[5003]: E0104 12:13:19.130493 5003 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Jan 04 12:13:19 crc kubenswrapper[5003]: E0104 12:13:19.130562 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data podName:829003dc-aa5e-43a6-a4f5-c578c73e76d4 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:19.630542208 +0000 UTC m=+1515.103572049 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data") pod "rabbitmq-cell1-server-0" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4") : configmap "rabbitmq-cell1-config-data" not found
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.144543 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwfgh\" (UniqueName: \"kubernetes.io/projected/28114f85-1d9b-4e71-be5f-d721f06c70dc-kube-api-access-pwfgh\") pod \"barbican-553f-account-create-update-kjhcr\" (UID: \"28114f85-1d9b-4e71-be5f-d721f06c70dc\") " pod="openstack/barbican-553f-account-create-update-kjhcr"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.207775 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-553f-account-create-update-kjhcr"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.223680 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-70b4-account-create-update-jmrxl"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.268352 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-70b4-account-create-update-jmrxl"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.359189 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-operator-scripts\") pod \"glance-70b4-account-create-update-gwtlx\" (UID: \"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7\") " pod="openstack/glance-70b4-account-create-update-gwtlx"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.359387 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wblkz\" (UniqueName: \"kubernetes.io/projected/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-kube-api-access-wblkz\") pod \"glance-70b4-account-create-update-gwtlx\" (UID: \"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7\") " pod="openstack/glance-70b4-account-create-update-gwtlx"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.370859 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-99mjg"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.463244 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-operator-scripts\") pod \"glance-70b4-account-create-update-gwtlx\" (UID: \"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7\") " pod="openstack/glance-70b4-account-create-update-gwtlx"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.463740 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wblkz\" (UniqueName: \"kubernetes.io/projected/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-kube-api-access-wblkz\") pod \"glance-70b4-account-create-update-gwtlx\" (UID: \"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7\") " pod="openstack/glance-70b4-account-create-update-gwtlx"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.465437 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-operator-scripts\") pod \"glance-70b4-account-create-update-gwtlx\" (UID: \"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7\") " pod="openstack/glance-70b4-account-create-update-gwtlx"
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.481052 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-ksmlf"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.481285 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-ksmlf" podUID="988b3ee2-147b-4ebc-9d31-42bdaf144bc5" containerName="openstack-network-exporter" containerID="cri-o://2de21e3795685f92c0995b8c0275e374995fb3d1feddd1208205a49fb45022bc" gracePeriod=30
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.556086 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-fdswd"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.614898 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-85q57"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.661965 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wblkz\" (UniqueName: \"kubernetes.io/projected/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-kube-api-access-wblkz\") pod \"glance-70b4-account-create-update-gwtlx\" (UID: \"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7\") " pod="openstack/glance-70b4-account-create-update-gwtlx"
Jan 04 12:13:19 crc kubenswrapper[5003]: E0104 12:13:19.691889 5003 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Jan 04 12:13:19 crc kubenswrapper[5003]: E0104 12:13:19.692046 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data podName:829003dc-aa5e-43a6-a4f5-c578c73e76d4 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:20.691997552 +0000 UTC m=+1516.165027393 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data") pod "rabbitmq-cell1-server-0" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4") : configmap "rabbitmq-cell1-config-data" not found
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.766752 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5f14-account-create-update-zqtvl"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.900538 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-85q57"]
Jan 04 12:13:19 crc kubenswrapper[5003]: I0104 12:13:19.900713 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-70b4-account-create-update-gwtlx"
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.052118 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-5f14-account-create-update-zqtvl"]
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.155110 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-2zm85"]
Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.158495 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3e03c9994a07fa7e18a42bd5c17e591979717aa51ecb5df6f1ce553e5a854fff" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.220137 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.221247 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ksmlf_988b3ee2-147b-4ebc-9d31-42bdaf144bc5/openstack-network-exporter/0.log"
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.221310 5003 generic.go:334] "Generic (PLEG): container finished" podID="988b3ee2-147b-4ebc-9d31-42bdaf144bc5" containerID="2de21e3795685f92c0995b8c0275e374995fb3d1feddd1208205a49fb45022bc" exitCode=2
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.221439 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ksmlf" event={"ID":"988b3ee2-147b-4ebc-9d31-42bdaf144bc5","Type":"ContainerDied","Data":"2de21e3795685f92c0995b8c0275e374995fb3d1feddd1208205a49fb45022bc"}
Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.245829 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3e03c9994a07fa7e18a42bd5c17e591979717aa51ecb5df6f1ce553e5a854fff" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.282306 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-2zm85"]
Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.314052 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3e03c9994a07fa7e18a42bd5c17e591979717aa51ecb5df6f1ce553e5a854fff" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.314103 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerName="ovn-northd"
Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.323559 5003 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.323639 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data podName:81193935-fcd0-4877-9d65-6155c1a888e2 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:20.82362309 +0000 UTC m=+1516.296652931 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data") pod "rabbitmq-server-0" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2") : configmap "rabbitmq-config-data" not found
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.351235 5003 generic.go:334] "Generic (PLEG): container finished" podID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerID="455bff1469c5fdbae60e33d2f9fdcc36a7531d6f5b1512eba0641e8420891546" exitCode=2
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.351506 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"653b2e11-95ca-46e7-b28c-a1170d7a180b","Type":"ContainerDied","Data":"455bff1469c5fdbae60e33d2f9fdcc36a7531d6f5b1512eba0641e8420891546"}
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.367489 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-ccc2-account-create-update-ftb5c"]
Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.383923 5003 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 04 12:13:20 crc kubenswrapper[5003]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash
Jan 04 12:13:20 crc kubenswrapper[5003]:
Jan 04 12:13:20 crc kubenswrapper[5003]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh
Jan 04 12:13:20 crc kubenswrapper[5003]:
Jan 04 12:13:20 crc kubenswrapper[5003]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
Jan 04 12:13:20 crc kubenswrapper[5003]:
Jan 04 12:13:20 crc kubenswrapper[5003]: MYSQL_CMD="mysql -h -u root -P 3306"
Jan 04 12:13:20 crc kubenswrapper[5003]:
Jan 04 12:13:20 crc kubenswrapper[5003]: if [ -n "neutron" ]; then
Jan 04 12:13:20 crc kubenswrapper[5003]: GRANT_DATABASE="neutron"
Jan 04 12:13:20 crc kubenswrapper[5003]: else
Jan 04 12:13:20 crc kubenswrapper[5003]: GRANT_DATABASE="*"
Jan 04 12:13:20 crc kubenswrapper[5003]: fi
Jan 04 12:13:20 crc kubenswrapper[5003]:
Jan 04 12:13:20 crc kubenswrapper[5003]: # going for maximum compatibility here:
Jan 04 12:13:20 crc kubenswrapper[5003]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used
Jan 04 12:13:20 crc kubenswrapper[5003]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not
Jan 04 12:13:20 crc kubenswrapper[5003]: # 3. create user with CREATE but then do all password and TLS with ALTER to
Jan 04 12:13:20 crc kubenswrapper[5003]: # support updates
Jan 04 12:13:20 crc kubenswrapper[5003]:
Jan 04 12:13:20 crc kubenswrapper[5003]: $MYSQL_CMD < logger="UnhandledError"
Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.384984 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack/neutron-daa1-account-create-update-m44v2" podUID="fba35580-43f2-4d96-8c52-6da2b5fdbd94"
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.413545 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-4qtmm"]
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.443476 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-4qtmm"]
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.465881 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-ccc2-account-create-update-ftb5c"]
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.496267 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.497087 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="741d1d6f-4c11-4352-ba73-c8a13a465c95" containerName="openstack-network-exporter" containerID="cri-o://a681cbc3fe9ff43a2d77746a80489f681d1a0e3c6c0988ff7acac4e32cd4e452" gracePeriod=300
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.557070 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-qx4js"]
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.627071 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-qx4js"]
Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.637149 5003 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-fdswd" message="Exiting ovn-controller (1) "
Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.637198 5003 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack/ovn-controller-fdswd" podUID="c88e1443-25c4-4e67-83d0-e43cef2b2e5c" containerName="ovn-controller" containerID="cri-o://6388d2c6684a9b9e2ade7062fc176c21d7b16376b2604b8e0a56c525cd3036cb"
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.637238 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-fdswd" podUID="c88e1443-25c4-4e67-83d0-e43cef2b2e5c" containerName="ovn-controller" containerID="cri-o://6388d2c6684a9b9e2ade7062fc176c21d7b16376b2604b8e0a56c525cd3036cb" gracePeriod=29
Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.679633 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-5f14-account-create-update-xsz5w"]
Need to start a new one" pod="openstack/nova-api-5f14-account-create-update-xsz5w" Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.686356 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.713577 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.714161 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="c5c96797-3987-489a-8ce2-510caa11262c" containerName="openstack-network-exporter" containerID="cri-o://94bf6e10e5febc0e0054c7eb1c6a15116c187661c05d7923c6e30f3126e4daba" gracePeriod=300 Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.747076 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c304f0e5-d232-4549-ab8f-f11f5008f903-operator-scripts\") pod \"nova-api-5f14-account-create-update-xsz5w\" (UID: \"c304f0e5-d232-4549-ab8f-f11f5008f903\") " pod="openstack/nova-api-5f14-account-create-update-xsz5w" Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.747201 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmjcr\" (UniqueName: \"kubernetes.io/projected/c304f0e5-d232-4549-ab8f-f11f5008f903-kube-api-access-jmjcr\") pod \"nova-api-5f14-account-create-update-xsz5w\" (UID: \"c304f0e5-d232-4549-ab8f-f11f5008f903\") " pod="openstack/nova-api-5f14-account-create-update-xsz5w" Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.748230 5003 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 04 12:13:20 crc kubenswrapper[5003]: E0104 12:13:20.748291 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data podName:829003dc-aa5e-43a6-a4f5-c578c73e76d4 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:22.748273698 +0000 UTC m=+1518.221303539 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data") pod "rabbitmq-cell1-server-0" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4") : configmap "rabbitmq-cell1-config-data" not found Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.756603 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-jvhdz"] Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.769597 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5f14-account-create-update-xsz5w"] Jan 04 12:13:20 crc kubenswrapper[5003]: I0104 12:13:20.814174 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-jvhdz"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:20.850092 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c304f0e5-d232-4549-ab8f-f11f5008f903-operator-scripts\") pod \"nova-api-5f14-account-create-update-xsz5w\" (UID: \"c304f0e5-d232-4549-ab8f-f11f5008f903\") " pod="openstack/nova-api-5f14-account-create-update-xsz5w" Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:20.850416 5003 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:20.850489 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data podName:81193935-fcd0-4877-9d65-6155c1a888e2 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:21.850467759 +0000 UTC m=+1517.323497600 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data") pod "rabbitmq-server-0" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2") : configmap "rabbitmq-config-data" not found Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:20.850187 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmjcr\" (UniqueName: \"kubernetes.io/projected/c304f0e5-d232-4549-ab8f-f11f5008f903-kube-api-access-jmjcr\") pod \"nova-api-5f14-account-create-update-xsz5w\" (UID: \"c304f0e5-d232-4549-ab8f-f11f5008f903\") " pod="openstack/nova-api-5f14-account-create-update-xsz5w" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:20.853737 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c304f0e5-d232-4549-ab8f-f11f5008f903-operator-scripts\") pod \"nova-api-5f14-account-create-update-xsz5w\" (UID: \"c304f0e5-d232-4549-ab8f-f11f5008f903\") " pod="openstack/nova-api-5f14-account-create-update-xsz5w" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:20.884823 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmjcr\" (UniqueName: \"kubernetes.io/projected/c304f0e5-d232-4549-ab8f-f11f5008f903-kube-api-access-jmjcr\") pod \"nova-api-5f14-account-create-update-xsz5w\" (UID: \"c304f0e5-d232-4549-ab8f-f11f5008f903\") " pod="openstack/nova-api-5f14-account-create-update-xsz5w" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.036769 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="471449a2-d6cc-477b-bad9-a616a54f4502" path="/var/lib/kubelet/pods/471449a2-d6cc-477b-bad9-a616a54f4502/volumes" Jan 04 12:13:21 crc kubenswrapper[5003]: 
I0104 12:13:21.037693 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ad51468-82d2-4b49-b778-f2d296eafbf1" path="/var/lib/kubelet/pods/4ad51468-82d2-4b49-b778-f2d296eafbf1/volumes" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.041775 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54218530-45fd-460c-b755-49b5f28b0f02" path="/var/lib/kubelet/pods/54218530-45fd-460c-b755-49b5f28b0f02/volumes" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.046167 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="741d1d6f-4c11-4352-ba73-c8a13a465c95" containerName="ovsdbserver-sb" containerID="cri-o://2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762" gracePeriod=300 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.047392 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="724a46c8-1096-4753-ba3e-e4128189c8ae" path="/var/lib/kubelet/pods/724a46c8-1096-4753-ba3e-e4128189c8ae/volumes" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.048050 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95845d2e-e8cf-4d56-ad63-260115e0efc8" path="/var/lib/kubelet/pods/95845d2e-e8cf-4d56-ad63-260115e0efc8/volumes" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.048687 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6fea540-25d6-49c0-86f9-9476e70ceb93" path="/var/lib/kubelet/pods/a6fea540-25d6-49c0-86f9-9476e70ceb93/volumes" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.049931 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b47fe1da-877a-4d5e-a21b-c7955bd00b30" path="/var/lib/kubelet/pods/b47fe1da-877a-4d5e-a21b-c7955bd00b30/volumes" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.051368 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2b0b250-33ee-45aa-baa8-1540d9f39b03" path="/var/lib/kubelet/pods/c2b0b250-33ee-45aa-baa8-1540d9f39b03/volumes" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.053052 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2d06f5b-83b1-4aea-9ab1-83eecc92dd98" path="/var/lib/kubelet/pods/d2d06f5b-83b1-4aea-9ab1-83eecc92dd98/volumes" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.053637 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da444afc-236b-465d-a105-f6c3b25f677c" path="/var/lib/kubelet/pods/da444afc-236b-465d-a105-f6c3b25f677c/volumes" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.056086 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-g4p7b"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.056117 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.056139 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-g4p7b"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.056157 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-5569cb5574-jt6r6"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.056172 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7d88cc4d8f-5mhr4"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.056187 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-7kqd7"] Jan 04 12:13:21 crc 
kubenswrapper[5003]: I0104 12:13:21.056223 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.056238 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-daa1-account-create-update-m44v2"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.056485 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7d5d490f-d968-4237-8a63-7f7d01b8708d" containerName="cinder-scheduler" containerID="cri-o://5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.056907 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-server" containerID="cri-o://8a065f77698f6ac06fadadaa5b0b12a9e635a05f7a2fd3ab7f7457eb16357d7d" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057135 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-5569cb5574-jt6r6" podUID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerName="placement-log" containerID="cri-o://7c4f0d8d5cf985bf4872ff6425b0a428694655251809cbbd7b02ebfb5aeb3a85" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057177 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="swift-recon-cron" containerID="cri-o://1ff1b1d3338eb5bfcb21f80e2c4c2e6ad020fb1d499df31a3c24c44755e46e60" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057187 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="7d5d490f-d968-4237-8a63-7f7d01b8708d" containerName="probe" containerID="cri-o://08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057221 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-expirer" containerID="cri-o://ed59910d0aea135dbd4ee19aeda59d078e4feebb5abcaf39f16948cd769ad0c6" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057250 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-updater" containerID="cri-o://10f362bf62129cfc68ea5030dc9a630a990d022be1874d460b3a1e2b97c3806b" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057283 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-auditor" containerID="cri-o://c7da6298aedb8336c663f0f72ed02aee2693e470451ea9ed4c2506018c7c3b8b" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057210 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="rsync" containerID="cri-o://0f945f657e49547a265f6f68bbeab5475213ad26ac43900d9363d069d96d532d" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057317 5003 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-replicator" containerID="cri-o://c02d818e96b369de068228bec08edd738f084527048823b6a9f1dc73d5473513" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057351 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-server" containerID="cri-o://f2aa7e67c73edf19a995e58eaf9b8785bfd532521dc609235e9c65097cf71384" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057385 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-updater" containerID="cri-o://95da8a5d432f4fe64060441f956785bb4f966684b7334a245346aec06e1cf140" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057417 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-auditor" containerID="cri-o://0f48ea6f8d2e18984ca5443a03a04938c8b360cd16c1c6815b02f2fb373f0a8e" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057449 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-replicator" containerID="cri-o://f0aaa591de01ea442a981d1ec695614335ad33f8028cf7d9ef5da12021491ff0" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057480 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-server" containerID="cri-o://dc00a97e29c22bf9a2a36c4af6d0c30fdef5266c6b5c76c89cee2d2f47cd401e" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057513 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-reaper" containerID="cri-o://707399f94576a05d1029f0ca7a930546bdc46ba6b8a66a7f7d5123ee7b10547b" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057544 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-auditor" containerID="cri-o://8d2a2c4e9a22b4fe3fd7b40f0290eeae57fd3d5fa8a0b12d022f40ed1d9de1ab" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057576 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-replicator" containerID="cri-o://9859f2a44803d2a1d464de1de41f2bfd77e5f0896ae37ca5e574d6ba7d0b8491" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057662 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7d88cc4d8f-5mhr4" podUID="23de8292-dc91-45db-8de9-59933352e3f2" containerName="neutron-api" containerID="cri-o://3d08041741653f728a1dcd7a717b74f0e1152bf8a3fc8991a888bfea4968ac08" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057719 5003 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/placement-5569cb5574-jt6r6" podUID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerName="placement-api" containerID="cri-o://e10ea013e6db1ec608b48d073081c9a1e3b0565542f8409e8438d07885d6f975" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057809 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" podUID="67adf34a-962a-435b-8e35-ae1387c097b6" containerName="dnsmasq-dns" containerID="cri-o://ce4043834be33197cdad3e565b1de177593fb0b0b9db95e5d58f8971339c2f91" gracePeriod=10 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.057861 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7d88cc4d8f-5mhr4" podUID="23de8292-dc91-45db-8de9-59933352e3f2" containerName="neutron-httpd" containerID="cri-o://312b6e09c8b9a68b781c0ec705d9f1a75feca3742a105d128ddcfa936c23e624" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.067156 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-55ncj"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.103346 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="c5c96797-3987-489a-8ce2-510caa11262c" containerName="ovsdbserver-nb" containerID="cri-o://5bfd4afff231ef79e9cf1ed25e3e862879baafe9a1abee867c1c87bf017c640d" gracePeriod=300 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.117809 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-55ncj"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.176296 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-jmh76"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.225647 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-k4dms"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.297856 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-jmh76"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.343956 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.347209 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762 is running failed: container process not found" containerID="2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.349111 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762 is running failed: container process not found" containerID="2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.349514 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" containerName="cinder-api-log" containerID="cri-o://8dc168dd5d62f051b24906056242c3db9d4e7a18cca4bd69532834764b2e4b47" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 
12:13:21.349862 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" containerName="cinder-api" containerID="cri-o://46919056bf4b2db61c4b851726637476ed5f878bddbce873c5a30520c666773e" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.352805 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762 is running failed: container process not found" containerID="2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.352897 5003 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-sb-0" podUID="741d1d6f-4c11-4352-ba73-c8a13a465c95" containerName="ovsdbserver-sb" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.381131 5003 generic.go:334] "Generic (PLEG): container finished" podID="627b5a59-3da1-4130-92b1-94fcfea8efd4" containerID="72b3f2ee266e67b1cc81df1e410f3bd7f96dcf54ecba5f2d66edb9857899172e" exitCode=137 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.381718 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-k4dms"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.389549 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.389870 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="43c1199f-e162-4062-a972-417afa58eaa6" containerName="glance-log" containerID="cri-o://2faff7fc607a82782ff7622c7373fabbd6ab09171776a5361884986f16ec6df5" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.390054 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="43c1199f-e162-4062-a972-417afa58eaa6" containerName="glance-httpd" containerID="cri-o://193d03f2dc8b6a55e957d530e54441551f102796455d35a76dcd721cbba41982" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.398049 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.398381 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="714823a9-560a-496c-b975-2db1099ad873" containerName="glance-log" containerID="cri-o://02447dca870462f4df40064d89c32f3a3d466ae10125a4bb87a31ddc1ecd1f9e" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.398552 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="714823a9-560a-496c-b975-2db1099ad873" containerName="glance-httpd" containerID="cri-o://5b23433759eb714853942994a37054e568cfd11f999d8e6d3f6f86c33c3787c4" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.399356 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-99mjg" 
podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovs-vswitchd" containerID="cri-o://dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" gracePeriod=28 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.407946 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-6885ccbc9-jntqm"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.410407 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-6885ccbc9-jntqm" podUID="afad0966-8385-444b-9eed-8418c0a49b2a" containerName="proxy-httpd" containerID="cri-o://6615326903f6990ecae76e7fe834c4ecc3fe23051337c2d5af2308f338730040" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.410564 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-6885ccbc9-jntqm" podUID="afad0966-8385-444b-9eed-8418c0a49b2a" containerName="proxy-server" containerID="cri-o://7f17c88fe39230076ac013b10c5ff4f44f766a32bb39a15570523798fd0cee22" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423287 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="ed59910d0aea135dbd4ee19aeda59d078e4feebb5abcaf39f16948cd769ad0c6" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423319 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="10f362bf62129cfc68ea5030dc9a630a990d022be1874d460b3a1e2b97c3806b" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423326 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="c7da6298aedb8336c663f0f72ed02aee2693e470451ea9ed4c2506018c7c3b8b" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423333 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="c02d818e96b369de068228bec08edd738f084527048823b6a9f1dc73d5473513" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423342 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="95da8a5d432f4fe64060441f956785bb4f966684b7334a245346aec06e1cf140" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423349 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="0f48ea6f8d2e18984ca5443a03a04938c8b360cd16c1c6815b02f2fb373f0a8e" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423356 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="f0aaa591de01ea442a981d1ec695614335ad33f8028cf7d9ef5da12021491ff0" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423370 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="707399f94576a05d1029f0ca7a930546bdc46ba6b8a66a7f7d5123ee7b10547b" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423377 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="8d2a2c4e9a22b4fe3fd7b40f0290eeae57fd3d5fa8a0b12d022f40ed1d9de1ab" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423383 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" 
containerID="9859f2a44803d2a1d464de1de41f2bfd77e5f0896ae37ca5e574d6ba7d0b8491" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423455 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423484 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"ed59910d0aea135dbd4ee19aeda59d078e4feebb5abcaf39f16948cd769ad0c6"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423502 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"10f362bf62129cfc68ea5030dc9a630a990d022be1874d460b3a1e2b97c3806b"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423514 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"c7da6298aedb8336c663f0f72ed02aee2693e470451ea9ed4c2506018c7c3b8b"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423529 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"c02d818e96b369de068228bec08edd738f084527048823b6a9f1dc73d5473513"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423542 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"95da8a5d432f4fe64060441f956785bb4f966684b7334a245346aec06e1cf140"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423554 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"0f48ea6f8d2e18984ca5443a03a04938c8b360cd16c1c6815b02f2fb373f0a8e"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423564 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"f0aaa591de01ea442a981d1ec695614335ad33f8028cf7d9ef5da12021491ff0"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423573 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"707399f94576a05d1029f0ca7a930546bdc46ba6b8a66a7f7d5123ee7b10547b"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423582 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"8d2a2c4e9a22b4fe3fd7b40f0290eeae57fd3d5fa8a0b12d022f40ed1d9de1ab"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.423590 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"9859f2a44803d2a1d464de1de41f2bfd77e5f0896ae37ca5e574d6ba7d0b8491"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.441532 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ksmlf_988b3ee2-147b-4ebc-9d31-42bdaf144bc5/openstack-network-exporter/0.log" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.441621 5003 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ksmlf" event={"ID":"988b3ee2-147b-4ebc-9d31-42bdaf144bc5","Type":"ContainerDied","Data":"24f90983c5e9855fd85fd4d036576f9825383efadffd1d6553f46bd882adf303"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.441651 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24f90983c5e9855fd85fd4d036576f9825383efadffd1d6553f46bd882adf303" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.444219 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-e45f-account-create-update-llf8z"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.450024 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-e45f-account-create-update-llf8z"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.451130 5003 generic.go:334] "Generic (PLEG): container finished" podID="67adf34a-962a-435b-8e35-ae1387c097b6" containerID="ce4043834be33197cdad3e565b1de177593fb0b0b9db95e5d58f8971339c2f91" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.451183 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" event={"ID":"67adf34a-962a-435b-8e35-ae1387c097b6","Type":"ContainerDied","Data":"ce4043834be33197cdad3e565b1de177593fb0b0b9db95e5d58f8971339c2f91"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.459388 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-89hnr"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.460223 5003 generic.go:334] "Generic (PLEG): container finished" podID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerID="7c4f0d8d5cf985bf4872ff6425b0a428694655251809cbbd7b02ebfb5aeb3a85" exitCode=143 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.460295 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5569cb5574-jt6r6" event={"ID":"0ebba05f-e935-404f-85c0-4bd28a6afd28","Type":"ContainerDied","Data":"7c4f0d8d5cf985bf4872ff6425b0a428694655251809cbbd7b02ebfb5aeb3a85"} Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.484101 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod627b5a59_3da1_4130_92b1_94fcfea8efd4.slice/crio-72b3f2ee266e67b1cc81df1e410f3bd7f96dcf54ecba5f2d66edb9857899172e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod741d1d6f_4c11_4352_ba73_c8a13a465c95.slice/crio-a681cbc3fe9ff43a2d77746a80489f681d1a0e3c6c0988ff7acac4e32cd4e452.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode387635d_9ef2_4b1d_9303_0d762e8b282c.slice/crio-10f362bf62129cfc68ea5030dc9a630a990d022be1874d460b3a1e2b97c3806b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod741d1d6f_4c11_4352_ba73_c8a13a465c95.slice/crio-conmon-2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc88e1443_25c4_4e67_83d0_e43cef2b2e5c.slice/crio-6388d2c6684a9b9e2ade7062fc176c21d7b16376b2604b8e0a56c525cd3036cb.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod741d1d6f_4c11_4352_ba73_c8a13a465c95.slice/crio-conmon-a681cbc3fe9ff43a2d77746a80489f681d1a0e3c6c0988ff7acac4e32cd4e452.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode387635d_9ef2_4b1d_9303_0d762e8b282c.slice/crio-0f48ea6f8d2e18984ca5443a03a04938c8b360cd16c1c6815b02f2fb373f0a8e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ebba05f_e935_404f_85c0_4bd28a6afd28.slice/crio-7c4f0d8d5cf985bf4872ff6425b0a428694655251809cbbd7b02ebfb5aeb3a85.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5c96797_3987_489a_8ce2_510caa11262c.slice/crio-5bfd4afff231ef79e9cf1ed25e3e862879baafe9a1abee867c1c87bf017c640d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode387635d_9ef2_4b1d_9303_0d762e8b282c.slice/crio-8d2a2c4e9a22b4fe3fd7b40f0290eeae57fd3d5fa8a0b12d022f40ed1d9de1ab.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode387635d_9ef2_4b1d_9303_0d762e8b282c.slice/crio-707399f94576a05d1029f0ca7a930546bdc46ba6b8a66a7f7d5123ee7b10547b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc88e1443_25c4_4e67_83d0_e43cef2b2e5c.slice/crio-conmon-6388d2c6684a9b9e2ade7062fc176c21d7b16376b2604b8e0a56c525cd3036cb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43c1199f_e162_4062_a972_417afa58eaa6.slice/crio-2faff7fc607a82782ff7622c7373fabbd6ab09171776a5361884986f16ec6df5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43c1199f_e162_4062_a972_417afa58eaa6.slice/crio-conmon-2faff7fc607a82782ff7622c7373fabbd6ab09171776a5361884986f16ec6df5.scope\": RecentStats: unable to find data in memory cache]" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.504164 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="829003dc-aa5e-43a6-a4f5-c578c73e76d4" containerName="rabbitmq" containerID="cri-o://a7db53d84b6d5b63248f6eb1e83906ab06a6912bc5b207be4b9a8cd84f1c3d9f" gracePeriod=604800 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.521094 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-89hnr"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.538415 5003 generic.go:334] "Generic (PLEG): container finished" podID="741d1d6f-4c11-4352-ba73-c8a13a465c95" containerID="a681cbc3fe9ff43a2d77746a80489f681d1a0e3c6c0988ff7acac4e32cd4e452" exitCode=2 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.538447 5003 generic.go:334] "Generic (PLEG): container finished" podID="741d1d6f-4c11-4352-ba73-c8a13a465c95" containerID="2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.538493 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"741d1d6f-4c11-4352-ba73-c8a13a465c95","Type":"ContainerDied","Data":"a681cbc3fe9ff43a2d77746a80489f681d1a0e3c6c0988ff7acac4e32cd4e452"} Jan 04 12:13:21 crc 
kubenswrapper[5003]: I0104 12:13:21.538523 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"741d1d6f-4c11-4352-ba73-c8a13a465c95","Type":"ContainerDied","Data":"2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.540364 5003 generic.go:334] "Generic (PLEG): container finished" podID="c88e1443-25c4-4e67-83d0-e43cef2b2e5c" containerID="6388d2c6684a9b9e2ade7062fc176c21d7b16376b2604b8e0a56c525cd3036cb" exitCode=0 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.540405 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fdswd" event={"ID":"c88e1443-25c4-4e67-83d0-e43cef2b2e5c","Type":"ContainerDied","Data":"6388d2c6684a9b9e2ade7062fc176c21d7b16376b2604b8e0a56c525cd3036cb"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.542756 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-daa1-account-create-update-m44v2" event={"ID":"fba35580-43f2-4d96-8c52-6da2b5fdbd94","Type":"ContainerStarted","Data":"0a9e72a8af83b11d4f125c9da398452d30946fab5d7d128c67796709ab93ff32"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.555557 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_c5c96797-3987-489a-8ce2-510caa11262c/ovsdbserver-nb/0.log" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.555596 5003 generic.go:334] "Generic (PLEG): container finished" podID="c5c96797-3987-489a-8ce2-510caa11262c" containerID="94bf6e10e5febc0e0054c7eb1c6a15116c187661c05d7923c6e30f3126e4daba" exitCode=2 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.555613 5003 generic.go:334] "Generic (PLEG): container finished" podID="c5c96797-3987-489a-8ce2-510caa11262c" containerID="5bfd4afff231ef79e9cf1ed25e3e862879baafe9a1abee867c1c87bf017c640d" exitCode=143 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.555631 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c5c96797-3987-489a-8ce2-510caa11262c","Type":"ContainerDied","Data":"94bf6e10e5febc0e0054c7eb1c6a15116c187661c05d7923c6e30f3126e4daba"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.555650 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c5c96797-3987-489a-8ce2-510caa11262c","Type":"ContainerDied","Data":"5bfd4afff231ef79e9cf1ed25e3e862879baafe9a1abee867c1c87bf017c640d"} Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.564448 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.585976 5003 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:13:21 crc kubenswrapper[5003]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc kubenswrapper[5003]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc kubenswrapper[5003]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc kubenswrapper[5003]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc 
kubenswrapper[5003]: if [ -n "neutron" ]; then Jan 04 12:13:21 crc kubenswrapper[5003]: GRANT_DATABASE="neutron" Jan 04 12:13:21 crc kubenswrapper[5003]: else Jan 04 12:13:21 crc kubenswrapper[5003]: GRANT_DATABASE="*" Jan 04 12:13:21 crc kubenswrapper[5003]: fi Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc kubenswrapper[5003]: # going for maximum compatibility here: Jan 04 12:13:21 crc kubenswrapper[5003]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:13:21 crc kubenswrapper[5003]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:13:21 crc kubenswrapper[5003]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:13:21 crc kubenswrapper[5003]: # support updates Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc kubenswrapper[5003]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.588338 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack/neutron-daa1-account-create-update-m44v2" podUID="fba35580-43f2-4d96-8c52-6da2b5fdbd94" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.601856 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f14-account-create-update-xsz5w" Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.602794 5003 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Jan 04 12:13:21 crc kubenswrapper[5003]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 04 12:13:21 crc kubenswrapper[5003]: + source /usr/local/bin/container-scripts/functions Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNBridge=br-int Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNRemote=tcp:localhost:6642 Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNEncapType=geneve Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNAvailabilityZones= Jan 04 12:13:21 crc kubenswrapper[5003]: ++ EnableChassisAsGateway=true Jan 04 12:13:21 crc kubenswrapper[5003]: ++ PhysicalNetworks= Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNHostName= Jan 04 12:13:21 crc kubenswrapper[5003]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 04 12:13:21 crc kubenswrapper[5003]: ++ ovs_dir=/var/lib/openvswitch Jan 04 12:13:21 crc kubenswrapper[5003]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 04 12:13:21 crc kubenswrapper[5003]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 04 12:13:21 crc kubenswrapper[5003]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + sleep 0.5 Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + sleep 0.5 Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + sleep 0.5 Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + cleanup_ovsdb_server_semaphore Jan 04 12:13:21 crc kubenswrapper[5003]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 04 12:13:21 crc kubenswrapper[5003]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 04 12:13:21 crc kubenswrapper[5003]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-99mjg" message=< Jan 04 12:13:21 crc kubenswrapper[5003]: Exiting ovsdb-server (5) [ OK ] Jan 04 12:13:21 crc kubenswrapper[5003]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 04 12:13:21 crc kubenswrapper[5003]: + source /usr/local/bin/container-scripts/functions Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNBridge=br-int Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNRemote=tcp:localhost:6642 Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNEncapType=geneve Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNAvailabilityZones= Jan 04 12:13:21 crc kubenswrapper[5003]: ++ EnableChassisAsGateway=true Jan 04 12:13:21 crc kubenswrapper[5003]: ++ PhysicalNetworks= Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNHostName= Jan 04 12:13:21 crc kubenswrapper[5003]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 04 12:13:21 crc kubenswrapper[5003]: ++ ovs_dir=/var/lib/openvswitch Jan 04 12:13:21 crc kubenswrapper[5003]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 04 12:13:21 crc kubenswrapper[5003]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 04 12:13:21 crc kubenswrapper[5003]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + sleep 0.5 Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + sleep 0.5 Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + sleep 0.5 Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + cleanup_ovsdb_server_semaphore Jan 04 12:13:21 crc kubenswrapper[5003]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 04 12:13:21 crc kubenswrapper[5003]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 04 12:13:21 crc kubenswrapper[5003]: > Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.602834 5003 kuberuntime_container.go:691] "PreStop hook failed" err=< Jan 04 12:13:21 crc kubenswrapper[5003]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 04 12:13:21 crc kubenswrapper[5003]: + source /usr/local/bin/container-scripts/functions Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNBridge=br-int Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNRemote=tcp:localhost:6642 Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNEncapType=geneve Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNAvailabilityZones= Jan 04 12:13:21 crc kubenswrapper[5003]: ++ EnableChassisAsGateway=true Jan 04 12:13:21 crc kubenswrapper[5003]: ++ PhysicalNetworks= Jan 04 12:13:21 crc kubenswrapper[5003]: ++ OVNHostName= Jan 04 12:13:21 crc kubenswrapper[5003]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 04 12:13:21 crc kubenswrapper[5003]: ++ ovs_dir=/var/lib/openvswitch Jan 04 12:13:21 crc kubenswrapper[5003]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 04 12:13:21 crc kubenswrapper[5003]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 04 12:13:21 crc kubenswrapper[5003]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + sleep 0.5 Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + sleep 0.5 Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + sleep 0.5 Jan 04 12:13:21 crc kubenswrapper[5003]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 04 12:13:21 crc kubenswrapper[5003]: + cleanup_ovsdb_server_semaphore Jan 04 12:13:21 crc kubenswrapper[5003]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 04 12:13:21 crc kubenswrapper[5003]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 04 12:13:21 crc kubenswrapper[5003]: > pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server" containerID="cri-o://4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.602877 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server" containerID="cri-o://4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" gracePeriod=28 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.605938 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-5d455558d5-f58qc"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.606171 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-5d455558d5-f58qc" podUID="f0dcef7e-0621-4399-b967-5d5f90dd695f" containerName="barbican-worker-log" containerID="cri-o://b3d17d863faaeeeaa347265306da2ca818931952ebab5c074c74f4eaf33efddb" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.606507 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-5d455558d5-f58qc" podUID="f0dcef7e-0621-4399-b967-5d5f90dd695f" containerName="barbican-worker" containerID="cri-o://d3cbe354dea063f86f1a93a83f133720e47dd24a58d27a2d2bdf3cd839088357" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.609739 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ksmlf_988b3ee2-147b-4ebc-9d31-42bdaf144bc5/openstack-network-exporter/0.log" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.609779 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.636857 5003 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:13:21 crc kubenswrapper[5003]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc kubenswrapper[5003]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc kubenswrapper[5003]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc kubenswrapper[5003]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc kubenswrapper[5003]: if [ -n "barbican" ]; then Jan 04 12:13:21 crc kubenswrapper[5003]: GRANT_DATABASE="barbican" Jan 04 12:13:21 crc kubenswrapper[5003]: else Jan 04 12:13:21 crc kubenswrapper[5003]: GRANT_DATABASE="*" Jan 04 12:13:21 crc kubenswrapper[5003]: fi Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc kubenswrapper[5003]: # going for maximum compatibility here: Jan 04 12:13:21 crc kubenswrapper[5003]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:13:21 crc kubenswrapper[5003]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:13:21 crc kubenswrapper[5003]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:13:21 crc kubenswrapper[5003]: # support updates Jan 04 12:13:21 crc kubenswrapper[5003]: Jan 04 12:13:21 crc kubenswrapper[5003]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.639358 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack/barbican-553f-account-create-update-kjhcr" podUID="28114f85-1d9b-4e71-be5f-d721f06c70dc" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.648394 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.648659 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-log" containerID="cri-o://a03dc47ea5678d35499c7d2c162e59903757ffa22b0c3abb3b985027ba48121d" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.649086 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-metadata" containerID="cri-o://88cf151cf804256f12f3d9496a2faf5cc39d58a9955cca558c56b93f8e9f6281" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.663293 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-644b6c944d-sd84t"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.663596 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t" podUID="a29676ba-4d56-4b2e-a92f-c83b5f25345a" 
containerName="barbican-keystone-listener-log" containerID="cri-o://f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.663735 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t" podUID="a29676ba-4d56-4b2e-a92f-c83b5f25345a" containerName="barbican-keystone-listener" containerID="cri-o://450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.671702 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-9b3b-account-create-update-vh6fj"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.683116 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fdswd" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.693166 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-9b3b-account-create-update-vh6fj"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.694273 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-config\") pod \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.694332 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovs-rundir\") pod \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.694375 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-combined-ca-bundle\") pod \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.694487 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-metrics-certs-tls-certs\") pod \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.694562 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vx2xq\" (UniqueName: \"kubernetes.io/projected/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-kube-api-access-vx2xq\") pod \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.694578 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovn-rundir\") pod \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\" (UID: \"988b3ee2-147b-4ebc-9d31-42bdaf144bc5\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.695317 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-config" (OuterVolumeSpecName: "config") pod "988b3ee2-147b-4ebc-9d31-42bdaf144bc5" (UID: "988b3ee2-147b-4ebc-9d31-42bdaf144bc5"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.695564 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "988b3ee2-147b-4ebc-9d31-42bdaf144bc5" (UID: "988b3ee2-147b-4ebc-9d31-42bdaf144bc5"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.698493 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "988b3ee2-147b-4ebc-9d31-42bdaf144bc5" (UID: "988b3ee2-147b-4ebc-9d31-42bdaf144bc5"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.719789 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-kube-api-access-vx2xq" (OuterVolumeSpecName: "kube-api-access-vx2xq") pod "988b3ee2-147b-4ebc-9d31-42bdaf144bc5" (UID: "988b3ee2-147b-4ebc-9d31-42bdaf144bc5"). InnerVolumeSpecName "kube-api-access-vx2xq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.720094 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.720627 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerName="nova-api-log" containerID="cri-o://a36128483858c51bdf228f1c485a058ce56e3cb51fe588687795efca501f2855" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.721152 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerName="nova-api-api" containerID="cri-o://0251462325e49cea189f787676ef9b788f1da3cfefece6459ad475f7a74cd5c1" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.738251 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-66fdccd748-zk2qt"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.745529 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-66fdccd748-zk2qt" podUID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerName="barbican-api-log" containerID="cri-o://8100c888907e0c359672d9aa57c58750ef202eac34f6410af86c42eefa66cc49" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.746152 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-66fdccd748-zk2qt" podUID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerName="barbican-api" containerID="cri-o://dd895a8362663704ae95aaa9cadf69c10b404ecae29d602906e53e472c16265f" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.768773 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.774879 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "988b3ee2-147b-4ebc-9d31-42bdaf144bc5" (UID: "988b3ee2-147b-4ebc-9d31-42bdaf144bc5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.796763 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run\") pod \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.796845 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-combined-ca-bundle\") pod \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.796917 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-ovn-controller-tls-certs\") pod \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.796939 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-log-ovn\") pod \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.797063 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-scripts\") pod \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.797132 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwps5\" (UniqueName: \"kubernetes.io/projected/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-kube-api-access-nwps5\") pod \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.797147 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run-ovn\") pod \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\" (UID: \"c88e1443-25c4-4e67-83d0-e43cef2b2e5c\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.797675 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.797690 5003 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovs-rundir\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.797700 
5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.797709 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vx2xq\" (UniqueName: \"kubernetes.io/projected/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-kube-api-access-vx2xq\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.797719 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.797762 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "c88e1443-25c4-4e67-83d0-e43cef2b2e5c" (UID: "c88e1443-25c4-4e67-83d0-e43cef2b2e5c"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.797788 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run" (OuterVolumeSpecName: "var-run") pod "c88e1443-25c4-4e67-83d0-e43cef2b2e5c" (UID: "c88e1443-25c4-4e67-83d0-e43cef2b2e5c"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.798440 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-v7b9n"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.798525 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "c88e1443-25c4-4e67-83d0-e43cef2b2e5c" (UID: "c88e1443-25c4-4e67-83d0-e43cef2b2e5c"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.805370 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-scripts" (OuterVolumeSpecName: "scripts") pod "c88e1443-25c4-4e67-83d0-e43cef2b2e5c" (UID: "c88e1443-25c4-4e67-83d0-e43cef2b2e5c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.816040 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-v7b9n"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.826036 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-fqzhw"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.830519 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-kube-api-access-nwps5" (OuterVolumeSpecName: "kube-api-access-nwps5") pod "c88e1443-25c4-4e67-83d0-e43cef2b2e5c" (UID: "c88e1443-25c4-4e67-83d0-e43cef2b2e5c"). InnerVolumeSpecName "kube-api-access-nwps5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.848616 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-8552-account-create-update-69dtz"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.882470 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-fqzhw"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.889264 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-8552-account-create-update-69dtz"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.900872 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config\") pod \"627b5a59-3da1-4130-92b1-94fcfea8efd4\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.900930 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config-secret\") pod \"627b5a59-3da1-4130-92b1-94fcfea8efd4\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.900955 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xh6rd\" (UniqueName: \"kubernetes.io/projected/627b5a59-3da1-4130-92b1-94fcfea8efd4-kube-api-access-xh6rd\") pod \"627b5a59-3da1-4130-92b1-94fcfea8efd4\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.901082 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-combined-ca-bundle\") pod \"627b5a59-3da1-4130-92b1-94fcfea8efd4\" (UID: \"627b5a59-3da1-4130-92b1-94fcfea8efd4\") " Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.901840 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwps5\" (UniqueName: \"kubernetes.io/projected/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-kube-api-access-nwps5\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.901853 5003 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.901862 5003 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.901871 5003 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.901879 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:21 crc kubenswrapper[5003]: E0104 12:13:21.908139 5003 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 04 12:13:21 crc 
kubenswrapper[5003]: E0104 12:13:21.908221 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data podName:81193935-fcd0-4877-9d65-6155c1a888e2 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:23.908202364 +0000 UTC m=+1519.381232205 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data") pod "rabbitmq-server-0" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2") : configmap "rabbitmq-config-data" not found Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.920270 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.920499 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="890e99fd-959d-4946-9716-acfe78278964" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://5c16e890258cb037b15eed8bde5425015cdc0c2f4dd920723e36c9b49ced1ff0" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.929291 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/627b5a59-3da1-4130-92b1-94fcfea8efd4-kube-api-access-xh6rd" (OuterVolumeSpecName: "kube-api-access-xh6rd") pod "627b5a59-3da1-4130-92b1-94fcfea8efd4" (UID: "627b5a59-3da1-4130-92b1-94fcfea8efd4"). InnerVolumeSpecName "kube-api-access-xh6rd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.929560 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" containerName="galera" containerID="cri-o://7b0afbed514d598037465207941500c8d671c3e849c508673ddb681f2032cb98" gracePeriod=30 Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.945698 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-nvbp2"] Jan 04 12:13:21 crc kubenswrapper[5003]: I0104 12:13:21.987433 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c88e1443-25c4-4e67-83d0-e43cef2b2e5c" (UID: "c88e1443-25c4-4e67-83d0-e43cef2b2e5c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.008795 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xh6rd\" (UniqueName: \"kubernetes.io/projected/627b5a59-3da1-4130-92b1-94fcfea8efd4-kube-api-access-xh6rd\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.008830 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.013364 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.020085 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-nvbp2"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.022878 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "988b3ee2-147b-4ebc-9d31-42bdaf144bc5" (UID: "988b3ee2-147b-4ebc-9d31-42bdaf144bc5"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.023484 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "627b5a59-3da1-4130-92b1-94fcfea8efd4" (UID: "627b5a59-3da1-4130-92b1-94fcfea8efd4"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.056239 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-daa1-account-create-update-m44v2"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.062126 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-553f-account-create-update-kjhcr"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.096060 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-6885ccbc9-jntqm" podUID="afad0966-8385-444b-9eed-8418c0a49b2a" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.170:8080/healthcheck\": dial tcp 10.217.0.170:8080: connect: connection refused" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.096583 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-6885ccbc9-jntqm" podUID="afad0966-8385-444b-9eed-8418c0a49b2a" containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.0.170:8080/healthcheck\": dial tcp 10.217.0.170:8080: connect: connection refused" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.100272 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "627b5a59-3da1-4130-92b1-94fcfea8efd4" (UID: "627b5a59-3da1-4130-92b1-94fcfea8efd4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.103150 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-6hmnw"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.109157 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-6hmnw"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.109856 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6mxl\" (UniqueName: \"kubernetes.io/projected/67adf34a-962a-435b-8e35-ae1387c097b6-kube-api-access-x6mxl\") pod \"67adf34a-962a-435b-8e35-ae1387c097b6\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.109892 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-sb\") pod \"67adf34a-962a-435b-8e35-ae1387c097b6\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.109929 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-swift-storage-0\") pod \"67adf34a-962a-435b-8e35-ae1387c097b6\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.109972 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-nb\") pod \"67adf34a-962a-435b-8e35-ae1387c097b6\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.110077 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-svc\") pod \"67adf34a-962a-435b-8e35-ae1387c097b6\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.110112 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-config\") pod \"67adf34a-962a-435b-8e35-ae1387c097b6\" (UID: \"67adf34a-962a-435b-8e35-ae1387c097b6\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.110691 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/988b3ee2-147b-4ebc-9d31-42bdaf144bc5-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.110703 5003 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.110712 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.154996 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5f14-account-create-update-xsz5w"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 
12:13:22.171114 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67adf34a-962a-435b-8e35-ae1387c097b6-kube-api-access-x6mxl" (OuterVolumeSpecName: "kube-api-access-x6mxl") pod "67adf34a-962a-435b-8e35-ae1387c097b6" (UID: "67adf34a-962a-435b-8e35-ae1387c097b6"). InnerVolumeSpecName "kube-api-access-x6mxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.193189 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "627b5a59-3da1-4130-92b1-94fcfea8efd4" (UID: "627b5a59-3da1-4130-92b1-94fcfea8efd4"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.199060 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-70b4-account-create-update-gwtlx"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.215124 5003 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/627b5a59-3da1-4130-92b1-94fcfea8efd4-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.215176 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6mxl\" (UniqueName: \"kubernetes.io/projected/67adf34a-962a-435b-8e35-ae1387c097b6-kube-api-access-x6mxl\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.217002 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-xw5xf"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.248136 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-xw5xf"] Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.250792 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7b0afbed514d598037465207941500c8d671c3e849c508673ddb681f2032cb98" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.251498 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-config" (OuterVolumeSpecName: "config") pod "67adf34a-962a-435b-8e35-ae1387c097b6" (UID: "67adf34a-962a-435b-8e35-ae1387c097b6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.254448 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7b0afbed514d598037465207941500c8d671c3e849c508673ddb681f2032cb98" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.262582 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "c88e1443-25c4-4e67-83d0-e43cef2b2e5c" (UID: "c88e1443-25c4-4e67-83d0-e43cef2b2e5c"). 
InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.264361 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7b0afbed514d598037465207941500c8d671c3e849c508673ddb681f2032cb98" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.264407 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" containerName="galera" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.276715 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "67adf34a-962a-435b-8e35-ae1387c097b6" (UID: "67adf34a-962a-435b-8e35-ae1387c097b6"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.285148 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "67adf34a-962a-435b-8e35-ae1387c097b6" (UID: "67adf34a-962a-435b-8e35-ae1387c097b6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.289863 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "67adf34a-962a-435b-8e35-ae1387c097b6" (UID: "67adf34a-962a-435b-8e35-ae1387c097b6"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.297306 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-lph7c"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.321382 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.321429 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.321441 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.321453 5003 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.321464 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/c88e1443-25c4-4e67-83d0-e43cef2b2e5c-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.326797 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-lph7c"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.336990 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.337423 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="295f88c8-79ac-463f-85e3-d98dc15dd06f" containerName="nova-scheduler-scheduler" containerID="cri-o://c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd" gracePeriod=30 Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.353096 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nb5kc"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.363104 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.363891 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="ff44c8db-792b-491a-879a-7e1ae7717a0f" containerName="nova-cell1-conductor-conductor" containerID="cri-o://c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a" gracePeriod=30 Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.369502 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "67adf34a-962a-435b-8e35-ae1387c097b6" (UID: "67adf34a-962a-435b-8e35-ae1387c097b6"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.377985 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-nb5kc"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.389572 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-7z7gd"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.408657 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.408953 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="28122a6a-8b54-4ff3-9092-a1f7439a35cf" containerName="nova-cell0-conductor-conductor" containerID="cri-o://716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4" gracePeriod=30 Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.412866 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.421203 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_c5c96797-3987-489a-8ce2-510caa11262c/ovsdbserver-nb/0.log" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.421305 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.423666 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/67adf34a-962a-435b-8e35-ae1387c097b6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.424978 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-7z7gd"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.431782 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.466761 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-553f-account-create-update-kjhcr"] Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.489192 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.491143 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.491274 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" 
cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.492285 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.492349 5003 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.505915 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="81193935-fcd0-4877-9d65-6155c1a888e2" containerName="rabbitmq" containerID="cri-o://f0dca325e90af7570f19f9ac0610466deeda038b06d2c3ca9f19a6c46586b480" gracePeriod=604800 Jan 04 12:13:22 crc kubenswrapper[5003]: W0104 12:13:22.509710 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f2d1a12_b4ce_48c2_88b0_5ae881760963.slice/crio-27f22212fb6f4e2a634420812b10a71db275049351456b55108ab2c11eb76cc5 WatchSource:0}: Error finding container 27f22212fb6f4e2a634420812b10a71db275049351456b55108ab2c11eb76cc5: Status 404 returned error can't find the container with id 27f22212fb6f4e2a634420812b10a71db275049351456b55108ab2c11eb76cc5 Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.522176 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524400 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-scripts\") pod \"c5c96797-3987-489a-8ce2-510caa11262c\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524450 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-metrics-certs-tls-certs\") pod \"c5c96797-3987-489a-8ce2-510caa11262c\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524527 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"741d1d6f-4c11-4352-ba73-c8a13a465c95\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524548 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-combined-ca-bundle\") pod \"c5c96797-3987-489a-8ce2-510caa11262c\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524589 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-config\") pod \"c5c96797-3987-489a-8ce2-510caa11262c\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524631 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c5c96797-3987-489a-8ce2-510caa11262c-ovsdb-rundir\") pod \"c5c96797-3987-489a-8ce2-510caa11262c\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524664 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-config\") pod \"741d1d6f-4c11-4352-ba73-c8a13a465c95\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524724 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-ovsdbserver-nb-tls-certs\") pod \"c5c96797-3987-489a-8ce2-510caa11262c\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524801 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdbserver-sb-tls-certs\") pod \"741d1d6f-4c11-4352-ba73-c8a13a465c95\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524819 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nffl6\" (UniqueName: \"kubernetes.io/projected/741d1d6f-4c11-4352-ba73-c8a13a465c95-kube-api-access-nffl6\") pod \"741d1d6f-4c11-4352-ba73-c8a13a465c95\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524842 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-metrics-certs-tls-certs\") pod \"741d1d6f-4c11-4352-ba73-c8a13a465c95\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524863 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-combined-ca-bundle\") pod \"741d1d6f-4c11-4352-ba73-c8a13a465c95\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524933 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"c5c96797-3987-489a-8ce2-510caa11262c\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524948 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-scripts\") pod \"741d1d6f-4c11-4352-ba73-c8a13a465c95\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524964 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdb-rundir\") pod \"741d1d6f-4c11-4352-ba73-c8a13a465c95\" (UID: \"741d1d6f-4c11-4352-ba73-c8a13a465c95\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.524989 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b67jl\" (UniqueName: \"kubernetes.io/projected/c5c96797-3987-489a-8ce2-510caa11262c-kube-api-access-b67jl\") pod \"c5c96797-3987-489a-8ce2-510caa11262c\" (UID: \"c5c96797-3987-489a-8ce2-510caa11262c\") " Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.527698 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-scripts" (OuterVolumeSpecName: "scripts") pod "c5c96797-3987-489a-8ce2-510caa11262c" (UID: "c5c96797-3987-489a-8ce2-510caa11262c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.531261 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-config" (OuterVolumeSpecName: "config") pod "741d1d6f-4c11-4352-ba73-c8a13a465c95" (UID: "741d1d6f-4c11-4352-ba73-c8a13a465c95"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.532404 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "c5c96797-3987-489a-8ce2-510caa11262c" (UID: "c5c96797-3987-489a-8ce2-510caa11262c"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.532915 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-scripts" (OuterVolumeSpecName: "scripts") pod "741d1d6f-4c11-4352-ba73-c8a13a465c95" (UID: "741d1d6f-4c11-4352-ba73-c8a13a465c95"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.533309 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "741d1d6f-4c11-4352-ba73-c8a13a465c95" (UID: "741d1d6f-4c11-4352-ba73-c8a13a465c95"). InnerVolumeSpecName "ovsdb-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: W0104 12:13:22.533406 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2f54e309_ce75_4ab4_8fad_f7c1ad1c9cb7.slice/crio-9433b5ede5eb6c7e4918ba2202d065b8cf4e5b7dc8b9c96f7bacc4e830095f73 WatchSource:0}: Error finding container 9433b5ede5eb6c7e4918ba2202d065b8cf4e5b7dc8b9c96f7bacc4e830095f73: Status 404 returned error can't find the container with id 9433b5ede5eb6c7e4918ba2202d065b8cf4e5b7dc8b9c96f7bacc4e830095f73 Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.535397 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:22 crc kubenswrapper[5003]: E0104 12:13:22.535478 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovs-vswitchd" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.555539 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-config" (OuterVolumeSpecName: "config") pod "c5c96797-3987-489a-8ce2-510caa11262c" (UID: "c5c96797-3987-489a-8ce2-510caa11262c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.555696 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5c96797-3987-489a-8ce2-510caa11262c-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "c5c96797-3987-489a-8ce2-510caa11262c" (UID: "c5c96797-3987-489a-8ce2-510caa11262c"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.565577 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "741d1d6f-4c11-4352-ba73-c8a13a465c95" (UID: "741d1d6f-4c11-4352-ba73-c8a13a465c95"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.565610 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5c96797-3987-489a-8ce2-510caa11262c-kube-api-access-b67jl" (OuterVolumeSpecName: "kube-api-access-b67jl") pod "c5c96797-3987-489a-8ce2-510caa11262c" (UID: "c5c96797-3987-489a-8ce2-510caa11262c"). InnerVolumeSpecName "kube-api-access-b67jl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.572124 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/741d1d6f-4c11-4352-ba73-c8a13a465c95-kube-api-access-nffl6" (OuterVolumeSpecName: "kube-api-access-nffl6") pod "741d1d6f-4c11-4352-ba73-c8a13a465c95" (UID: "741d1d6f-4c11-4352-ba73-c8a13a465c95"). InnerVolumeSpecName "kube-api-access-nffl6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:22 crc kubenswrapper[5003]: I0104 12:13:22.574880 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-70b4-account-create-update-gwtlx"] Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:22.598365 5003 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:13:23 crc kubenswrapper[5003]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: if [ -n "glance" ]; then Jan 04 12:13:23 crc kubenswrapper[5003]: GRANT_DATABASE="glance" Jan 04 12:13:23 crc kubenswrapper[5003]: else Jan 04 12:13:23 crc kubenswrapper[5003]: GRANT_DATABASE="*" Jan 04 12:13:23 crc kubenswrapper[5003]: fi Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: # going for maximum compatibility here: Jan 04 12:13:23 crc kubenswrapper[5003]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:13:23 crc kubenswrapper[5003]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:13:23 crc kubenswrapper[5003]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:13:23 crc kubenswrapper[5003]: # support updates Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:22.599774 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack/glance-70b4-account-create-update-gwtlx" podUID="2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.599939 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-553f-account-create-update-kjhcr" event={"ID":"28114f85-1d9b-4e71-be5f-d721f06c70dc","Type":"ContainerStarted","Data":"9c9a7db0826e756a4e32f1b6a7e9ea4e85bc952ced92dd7c8d3f8d8180631009"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.600479 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5c96797-3987-489a-8ce2-510caa11262c" (UID: "c5c96797-3987-489a-8ce2-510caa11262c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:22.612500 5003 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:13:23 crc kubenswrapper[5003]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: if [ -n "barbican" ]; then Jan 04 12:13:23 crc kubenswrapper[5003]: GRANT_DATABASE="barbican" Jan 04 12:13:23 crc kubenswrapper[5003]: else Jan 04 12:13:23 crc kubenswrapper[5003]: GRANT_DATABASE="*" Jan 04 12:13:23 crc kubenswrapper[5003]: fi Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: # going for maximum compatibility here: Jan 04 12:13:23 crc kubenswrapper[5003]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:13:23 crc kubenswrapper[5003]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:13:23 crc kubenswrapper[5003]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:13:23 crc kubenswrapper[5003]: # support updates Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.613349 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-kn44b"] Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:22.614080 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack/barbican-553f-account-create-update-kjhcr" podUID="28114f85-1d9b-4e71-be5f-d721f06c70dc" Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:22.615564 5003 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:13:23 crc kubenswrapper[5003]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: if [ -n "nova_api" ]; then Jan 04 12:13:23 crc kubenswrapper[5003]: GRANT_DATABASE="nova_api" Jan 04 12:13:23 crc kubenswrapper[5003]: else Jan 04 12:13:23 crc kubenswrapper[5003]: GRANT_DATABASE="*" Jan 04 12:13:23 crc kubenswrapper[5003]: fi Jan 04 12:13:23 crc 
kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: # going for maximum compatibility here: Jan 04 12:13:23 crc kubenswrapper[5003]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:13:23 crc kubenswrapper[5003]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:13:23 crc kubenswrapper[5003]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:13:23 crc kubenswrapper[5003]: # support updates Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:22.617238 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack/nova-api-5f14-account-create-update-xsz5w" podUID="c304f0e5-d232-4549-ab8f-f11f5008f903" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.624151 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "741d1d6f-4c11-4352-ba73-c8a13a465c95" (UID: "741d1d6f-4c11-4352-ba73-c8a13a465c95"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.624274 5003 generic.go:334] "Generic (PLEG): container finished" podID="43c1199f-e162-4062-a972-417afa58eaa6" containerID="2faff7fc607a82782ff7622c7373fabbd6ab09171776a5361884986f16ec6df5" exitCode=143 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.624383 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"43c1199f-e162-4062-a972-417afa58eaa6","Type":"ContainerDied","Data":"2faff7fc607a82782ff7622c7373fabbd6ab09171776a5361884986f16ec6df5"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627065 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627094 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627104 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627116 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5c96797-3987-489a-8ce2-510caa11262c-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627125 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c5c96797-3987-489a-8ce2-510caa11262c-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627134 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-config\") on node \"crc\" DevicePath \"\"" Jan 
04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627143 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nffl6\" (UniqueName: \"kubernetes.io/projected/741d1d6f-4c11-4352-ba73-c8a13a465c95-kube-api-access-nffl6\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627152 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627166 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627175 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/741d1d6f-4c11-4352-ba73-c8a13a465c95-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627186 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.627196 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b67jl\" (UniqueName: \"kubernetes.io/projected/c5c96797-3987-489a-8ce2-510caa11262c-kube-api-access-b67jl\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.642360 5003 generic.go:334] "Generic (PLEG): container finished" podID="a29676ba-4d56-4b2e-a92f-c83b5f25345a" containerID="f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478" exitCode=143 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.642505 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t" event={"ID":"a29676ba-4d56-4b2e-a92f-c83b5f25345a","Type":"ContainerDied","Data":"f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.652497 5003 generic.go:334] "Generic (PLEG): container finished" podID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerID="8100c888907e0c359672d9aa57c58750ef202eac34f6410af86c42eefa66cc49" exitCode=143 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.652586 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66fdccd748-zk2qt" event={"ID":"b53805c6-4e15-4580-a60d-1f0c9c1fcef6","Type":"ContainerDied","Data":"8100c888907e0c359672d9aa57c58750ef202eac34f6410af86c42eefa66cc49"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.657182 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-70b4-account-create-update-gwtlx" event={"ID":"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7","Type":"ContainerStarted","Data":"9433b5ede5eb6c7e4918ba2202d065b8cf4e5b7dc8b9c96f7bacc4e830095f73"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.697717 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5f14-account-create-update-xsz5w"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.714378 5003 generic.go:334] "Generic (PLEG): container finished" podID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" containerID="8dc168dd5d62f051b24906056242c3db9d4e7a18cca4bd69532834764b2e4b47" exitCode=143 Jan 04 12:13:23 crc 
kubenswrapper[5003]: I0104 12:13:22.714498 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2c899132-ee49-4ab3-89ea-95f0bfcb71ab","Type":"ContainerDied","Data":"8dc168dd5d62f051b24906056242c3db9d4e7a18cca4bd69532834764b2e4b47"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.718897 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.728612 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.736772 5003 generic.go:334] "Generic (PLEG): container finished" podID="f0dcef7e-0621-4399-b967-5d5f90dd695f" containerID="b3d17d863faaeeeaa347265306da2ca818931952ebab5c074c74f4eaf33efddb" exitCode=143 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.736921 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d455558d5-f58qc" event={"ID":"f0dcef7e-0621-4399-b967-5d5f90dd695f","Type":"ContainerDied","Data":"b3d17d863faaeeeaa347265306da2ca818931952ebab5c074c74f4eaf33efddb"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.737758 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.751948 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "741d1d6f-4c11-4352-ba73-c8a13a465c95" (UID: "741d1d6f-4c11-4352-ba73-c8a13a465c95"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.754699 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"741d1d6f-4c11-4352-ba73-c8a13a465c95","Type":"ContainerDied","Data":"868d00493b83018409f362ca624c82a60bd3be74e47068073991468ba8f540f7"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.754748 5003 scope.go:117] "RemoveContainer" containerID="a681cbc3fe9ff43a2d77746a80489f681d1a0e3c6c0988ff7acac4e32cd4e452" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.754917 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.755252 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "c5c96797-3987-489a-8ce2-510caa11262c" (UID: "c5c96797-3987-489a-8ce2-510caa11262c"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.767564 5003 generic.go:334] "Generic (PLEG): container finished" podID="23de8292-dc91-45db-8de9-59933352e3f2" containerID="312b6e09c8b9a68b781c0ec705d9f1a75feca3742a105d128ddcfa936c23e624" exitCode=0 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.767767 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d88cc4d8f-5mhr4" event={"ID":"23de8292-dc91-45db-8de9-59933352e3f2","Type":"ContainerDied","Data":"312b6e09c8b9a68b781c0ec705d9f1a75feca3742a105d128ddcfa936c23e624"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.779592 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "c5c96797-3987-489a-8ce2-510caa11262c" (UID: "c5c96797-3987-489a-8ce2-510caa11262c"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.783672 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "741d1d6f-4c11-4352-ba73-c8a13a465c95" (UID: "741d1d6f-4c11-4352-ba73-c8a13a465c95"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.800508 5003 generic.go:334] "Generic (PLEG): container finished" podID="714823a9-560a-496c-b975-2db1099ad873" containerID="02447dca870462f4df40064d89c32f3a3d466ae10125a4bb87a31ddc1ecd1f9e" exitCode=143 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.800613 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"714823a9-560a-496c-b975-2db1099ad873","Type":"ContainerDied","Data":"02447dca870462f4df40064d89c32f3a3d466ae10125a4bb87a31ddc1ecd1f9e"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.821557 5003 scope.go:117] "RemoveContainer" containerID="2a3d8a474b7ca0e0e5332e407803134102267d6de0d471b1d7bfcb2f97eaa762" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.827843 5003 generic.go:334] "Generic (PLEG): container finished" podID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" exitCode=0 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.832835 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.832849 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/741d1d6f-4c11-4352-ba73-c8a13a465c95-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.832860 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.832869 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.832877 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5c96797-3987-489a-8ce2-510caa11262c-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:22.832937 5003 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:22.832980 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data podName:829003dc-aa5e-43a6-a4f5-c578c73e76d4 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:26.832964922 +0000 UTC m=+1522.305994763 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data") pod "rabbitmq-cell1-server-0" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4") : configmap "rabbitmq-cell1-config-data" not found Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.834530 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03157841-c4cb-4d87-b168-ccbe7b52526d" path="/var/lib/kubelet/pods/03157841-c4cb-4d87-b168-ccbe7b52526d/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.835404 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756" path="/var/lib/kubelet/pods/190b8ae7-1cb7-46c7-ad59-9dd4f1a3b756/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.836245 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="209bf3b8-aa8b-46cb-b41f-1737c40c7522" path="/var/lib/kubelet/pods/209bf3b8-aa8b-46cb-b41f-1737c40c7522/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.837126 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="346862a9-1434-4cd7-bd85-1bbf1c02431d" path="/var/lib/kubelet/pods/346862a9-1434-4cd7-bd85-1bbf1c02431d/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.838960 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37399d86-126f-4327-94c9-f41df343ab62" path="/var/lib/kubelet/pods/37399d86-126f-4327-94c9-f41df343ab62/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.840473 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2" path="/var/lib/kubelet/pods/3f5e2a81-d4f0-4ed9-8970-1ffebe629ee2/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.841107 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4689f11c-e91e-4045-ae29-5377e6b8ae6e" path="/var/lib/kubelet/pods/4689f11c-e91e-4045-ae29-5377e6b8ae6e/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.842298 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="627b5a59-3da1-4130-92b1-94fcfea8efd4" path="/var/lib/kubelet/pods/627b5a59-3da1-4130-92b1-94fcfea8efd4/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.842980 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8100fc92-5887-4c6e-b489-6eabd0f88615" path="/var/lib/kubelet/pods/8100fc92-5887-4c6e-b489-6eabd0f88615/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 
12:13:22.847393 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ff01a27-abff-42af-a7dd-f63b6ade45bf" path="/var/lib/kubelet/pods/8ff01a27-abff-42af-a7dd-f63b6ade45bf/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.848566 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96d2524c-4772-4eb6-b108-0513fce70ad8" path="/var/lib/kubelet/pods/96d2524c-4772-4eb6-b108-0513fce70ad8/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.851131 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-fdswd" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.851971 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99bfe48b-6290-49a4-b08e-f81e305df2bc" path="/var/lib/kubelet/pods/99bfe48b-6290-49a4-b08e-f81e305df2bc/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.852985 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a58094ef-de28-4ae5-84a9-b74684aca52e" path="/var/lib/kubelet/pods/a58094ef-de28-4ae5-84a9-b74684aca52e/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.855359 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af87a6b9-8483-47b6-a458-4cde08f820ab" path="/var/lib/kubelet/pods/af87a6b9-8483-47b6-a458-4cde08f820ab/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.858596 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a" path="/var/lib/kubelet/pods/c76d3acd-e992-42b8-8dcc-0f5f9ddbd02a/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.859444 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5838830-ab2d-4d0d-ab22-7448352db030" path="/var/lib/kubelet/pods/e5838830-ab2d-4d0d-ab22-7448352db030/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.860309 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f65226b6-f859-4771-9ed2-37808a129af2" path="/var/lib/kubelet/pods/f65226b6-f859-4771-9ed2-37808a129af2/volumes" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.864571 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-99mjg" event={"ID":"d91cd6f4-0e52-4519-b337-9a7c2779b7f1","Type":"ContainerDied","Data":"4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.864607 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-fdswd" event={"ID":"c88e1443-25c4-4e67-83d0-e43cef2b2e5c","Type":"ContainerDied","Data":"de2220ff26a429e84b4767bccde7669818a28ec0b00af9fb7a2df1716de5644a"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.878561 5003 scope.go:117] "RemoveContainer" containerID="6388d2c6684a9b9e2ade7062fc176c21d7b16376b2604b8e0a56c525cd3036cb" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.888476 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="0f945f657e49547a265f6f68bbeab5475213ad26ac43900d9363d069d96d532d" exitCode=0 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.888906 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="f2aa7e67c73edf19a995e58eaf9b8785bfd532521dc609235e9c65097cf71384" exitCode=0 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.888918 5003 generic.go:334] "Generic (PLEG): container 
finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="dc00a97e29c22bf9a2a36c4af6d0c30fdef5266c6b5c76c89cee2d2f47cd401e" exitCode=0 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.888926 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="8a065f77698f6ac06fadadaa5b0b12a9e635a05f7a2fd3ab7f7457eb16357d7d" exitCode=0 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.888530 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"0f945f657e49547a265f6f68bbeab5475213ad26ac43900d9363d069d96d532d"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.889050 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"f2aa7e67c73edf19a995e58eaf9b8785bfd532521dc609235e9c65097cf71384"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.889067 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"dc00a97e29c22bf9a2a36c4af6d0c30fdef5266c6b5c76c89cee2d2f47cd401e"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.889077 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"8a065f77698f6ac06fadadaa5b0b12a9e635a05f7a2fd3ab7f7457eb16357d7d"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.894083 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-kn44b" event={"ID":"9f2d1a12-b4ce-48c2-88b0-5ae881760963","Type":"ContainerStarted","Data":"27f22212fb6f4e2a634420812b10a71db275049351456b55108ab2c11eb76cc5"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.897653 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_c5c96797-3987-489a-8ce2-510caa11262c/ovsdbserver-nb/0.log" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.897709 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"c5c96797-3987-489a-8ce2-510caa11262c","Type":"ContainerDied","Data":"54fd4657c38950a0854eb471f11d031bb99b9b9b6c0705a5482cd75be4082fbc"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.897789 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.900589 5003 generic.go:334] "Generic (PLEG): container finished" podID="afad0966-8385-444b-9eed-8418c0a49b2a" containerID="7f17c88fe39230076ac013b10c5ff4f44f766a32bb39a15570523798fd0cee22" exitCode=0 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.900616 5003 generic.go:334] "Generic (PLEG): container finished" podID="afad0966-8385-444b-9eed-8418c0a49b2a" containerID="6615326903f6990ecae76e7fe834c4ecc3fe23051337c2d5af2308f338730040" exitCode=0 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.900639 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6885ccbc9-jntqm" event={"ID":"afad0966-8385-444b-9eed-8418c0a49b2a","Type":"ContainerDied","Data":"7f17c88fe39230076ac013b10c5ff4f44f766a32bb39a15570523798fd0cee22"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.900666 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6885ccbc9-jntqm" event={"ID":"afad0966-8385-444b-9eed-8418c0a49b2a","Type":"ContainerDied","Data":"6615326903f6990ecae76e7fe834c4ecc3fe23051337c2d5af2308f338730040"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.902820 5003 generic.go:334] "Generic (PLEG): container finished" podID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerID="a36128483858c51bdf228f1c485a058ce56e3cb51fe588687795efca501f2855" exitCode=143 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.902870 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc","Type":"ContainerDied","Data":"a36128483858c51bdf228f1c485a058ce56e3cb51fe588687795efca501f2855"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.907475 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" event={"ID":"67adf34a-962a-435b-8e35-ae1387c097b6","Type":"ContainerDied","Data":"2136a2ccc126a4468037da941d378d56e8f116bc364f4b4fb53b5f12f50d9d9d"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.907632 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcd6f8f8f-7kqd7" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.922174 5003 scope.go:117] "RemoveContainer" containerID="94bf6e10e5febc0e0054c7eb1c6a15116c187661c05d7923c6e30f3126e4daba" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.925651 5003 generic.go:334] "Generic (PLEG): container finished" podID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerID="a03dc47ea5678d35499c7d2c162e59903757ffa22b0c3abb3b985027ba48121d" exitCode=143 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.925766 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7","Type":"ContainerDied","Data":"a03dc47ea5678d35499c7d2c162e59903757ffa22b0c3abb3b985027ba48121d"} Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.927391 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.931114 5003 generic.go:334] "Generic (PLEG): container finished" podID="7d5d490f-d968-4237-8a63-7f7d01b8708d" containerID="08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c" exitCode=0 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.931250 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-ksmlf" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:22.932882 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7d5d490f-d968-4237-8a63-7f7d01b8708d","Type":"ContainerDied","Data":"08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c"} Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:22.936078 5003 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 04 12:13:23 crc kubenswrapper[5003]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: if [ -n "neutron" ]; then Jan 04 12:13:23 crc kubenswrapper[5003]: GRANT_DATABASE="neutron" Jan 04 12:13:23 crc kubenswrapper[5003]: else Jan 04 12:13:23 crc kubenswrapper[5003]: GRANT_DATABASE="*" Jan 04 12:13:23 crc kubenswrapper[5003]: fi Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: # going for maximum compatibility here: Jan 04 12:13:23 crc kubenswrapper[5003]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 04 12:13:23 crc kubenswrapper[5003]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 04 12:13:23 crc kubenswrapper[5003]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 04 12:13:23 crc kubenswrapper[5003]: # support updates Jan 04 12:13:23 crc kubenswrapper[5003]: Jan 04 12:13:23 crc kubenswrapper[5003]: $MYSQL_CMD < logger="UnhandledError" Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:22.937226 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack/neutron-daa1-account-create-update-m44v2" podUID="fba35580-43f2-4d96-8c52-6da2b5fdbd94" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.277665 5003 scope.go:117] "RemoveContainer" containerID="5bfd4afff231ef79e9cf1ed25e3e862879baafe9a1abee867c1c87bf017c640d" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.324113 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-fdswd"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.327776 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-fdswd"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.339664 5003 scope.go:117] "RemoveContainer" containerID="ce4043834be33197cdad3e565b1de177593fb0b0b9db95e5d58f8971339c2f91" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.345976 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.360146 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.372750 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-7kqd7"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.373808 5003 scope.go:117] "RemoveContainer" containerID="bf568fc5453059ee85c44af4c2ca1bb686f34300564315d4ba9cc8f2758524b2" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.386387 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fcd6f8f8f-7kqd7"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.395357 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-ksmlf"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.403999 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-ksmlf"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.405936 5003 scope.go:117] "RemoveContainer" containerID="72b3f2ee266e67b1cc81df1e410f3bd7f96dcf54ecba5f2d66edb9857899172e" Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.415246 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.419223 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.590168 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.590526 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="ceilometer-central-agent" containerID="cri-o://2d1f1d056b61194569686724f2003dba8a8e5a0d07b9d3c97b835d48ef2f3c01" gracePeriod=30 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.593365 5003 kuberuntime_container.go:808] "Killing container with a grace 
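The heredoc that the captured command feeds into $MYSQL_CMD is truncated in the error record above, but the script's own comments spell out the pattern it implements: create the account with a bare CREATE USER (MySQL 8 no longer creates users implicitly on GRANT, and MariaDB's CREATE OR REPLACE USER is not portable), then apply password and TLS settings through ALTER USER so a rerun updates an existing account, and only then GRANT on the selected database. A minimal sketch of that pattern, with a hypothetical host and account name standing in for the operator's templated values (note the MYSQL_CMD captured above rendered "-h" with an empty host argument):

#!/bin/bash
# Sketch only: the real job sources /var/lib/operator-scripts/mysql_root_auth.sh
# and templates these values. DB_HOST and the 'neutron'@'%' account are illustrative.
export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."}
DB_HOST="openstack.example"   # hypothetical; the failing pod rendered -h with no value
MYSQL_CMD="mysql -h ${DB_HOST} -u root -P 3306"

$MYSQL_CMD <<EOSQL
-- step 1: CREATE ... IF NOT EXISTS is accepted by both MySQL 8 and MariaDB
CREATE USER IF NOT EXISTS 'neutron'@'%';
-- steps 2-3: password and TLS requirements go through ALTER so reruns converge
ALTER USER 'neutron'@'%' IDENTIFIED BY '${DatabasePassword}' REQUIRE NONE;
GRANT ALL PRIVILEGES ON neutron.* TO 'neutron'@'%';
EOSQL

Splitting CREATE from ALTER keeps the job idempotent: the CREATE is a no-op on a rerun, while the ALTER always reapplies the current password and TLS policy.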
period" pod="openstack/ceilometer-0" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="sg-core" containerID="cri-o://c595cd857ef9b98c086bda781fccff343323ff20c25f68b8460e16c62cecb388" gracePeriod=30 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.593491 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="ceilometer-notification-agent" containerID="cri-o://e66e7dbec82eeff67f71401a2ea11fe88a1996da2709ae8632f320656877be7d" gracePeriod=30 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.593546 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="proxy-httpd" containerID="cri-o://07b2e1874e1e6350f660adafaf47ead2fb632e268672667911eded298b5a3565" gracePeriod=30 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.656939 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.658423 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="90ff3d24-4688-4c94-b5d6-c408e74c28ae" containerName="kube-state-metrics" containerID="cri-o://db90af2bbb86e3f491229ad487c735393946380d939a9f6fd713ad64285edc75" gracePeriod=30 Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.871118 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.882639 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="274fbbf3-b927-408e-9594-946f6ea71638" containerName="memcached" containerID="cri-o://7141902b674c0aba97218ef22d6317c2792d0dbefb479c305e2f864785706754" gracePeriod=30 Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:23.911585 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.911929 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-788e-account-create-update-bg6xj"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.960320 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-788e-account-create-update-bg6xj"] Jan 04 12:13:23 crc kubenswrapper[5003]: E0104 12:13:23.980106 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:13:23 crc kubenswrapper[5003]: I0104 12:13:23.999006 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-788e-account-create-update-p27qs"] Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:23.999635 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741d1d6f-4c11-4352-ba73-c8a13a465c95" containerName="openstack-network-exporter" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:23.999662 5003 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="741d1d6f-4c11-4352-ba73-c8a13a465c95" containerName="openstack-network-exporter" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:23.999682 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67adf34a-962a-435b-8e35-ae1387c097b6" containerName="dnsmasq-dns" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:23.999688 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="67adf34a-962a-435b-8e35-ae1387c097b6" containerName="dnsmasq-dns" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:23.999700 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5c96797-3987-489a-8ce2-510caa11262c" containerName="ovsdbserver-nb" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:23.999707 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5c96797-3987-489a-8ce2-510caa11262c" containerName="ovsdbserver-nb" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:23.999717 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5c96797-3987-489a-8ce2-510caa11262c" containerName="openstack-network-exporter" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:23.999724 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5c96797-3987-489a-8ce2-510caa11262c" containerName="openstack-network-exporter" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:23.999739 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="988b3ee2-147b-4ebc-9d31-42bdaf144bc5" containerName="openstack-network-exporter" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:23.999746 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="988b3ee2-147b-4ebc-9d31-42bdaf144bc5" containerName="openstack-network-exporter" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:23.999773 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67adf34a-962a-435b-8e35-ae1387c097b6" containerName="init" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:23.999780 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="67adf34a-962a-435b-8e35-ae1387c097b6" containerName="init" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:23.999794 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741d1d6f-4c11-4352-ba73-c8a13a465c95" containerName="ovsdbserver-sb" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:23.999800 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="741d1d6f-4c11-4352-ba73-c8a13a465c95" containerName="ovsdbserver-sb" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:23.999826 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c88e1443-25c4-4e67-83d0-e43cef2b2e5c" containerName="ovn-controller" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:23.999835 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="c88e1443-25c4-4e67-83d0-e43cef2b2e5c" containerName="ovn-controller" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.000098 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5c96797-3987-489a-8ce2-510caa11262c" containerName="ovsdbserver-nb" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.000122 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="988b3ee2-147b-4ebc-9d31-42bdaf144bc5" containerName="openstack-network-exporter" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.000133 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="741d1d6f-4c11-4352-ba73-c8a13a465c95" containerName="ovsdbserver-sb" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 
12:13:24.000145 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="741d1d6f-4c11-4352-ba73-c8a13a465c95" containerName="openstack-network-exporter" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.000153 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5c96797-3987-489a-8ce2-510caa11262c" containerName="openstack-network-exporter" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.000163 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="67adf34a-962a-435b-8e35-ae1387c097b6" containerName="dnsmasq-dns" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.000170 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c88e1443-25c4-4e67-83d0-e43cef2b2e5c" containerName="ovn-controller" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.008363 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.008483 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="295f88c8-79ac-463f-85e3-d98dc15dd06f" containerName="nova-scheduler-scheduler" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.009346 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.011751 5003 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.011872 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data podName:81193935-fcd0-4877-9d65-6155c1a888e2 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:28.011846673 +0000 UTC m=+1523.484876504 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data") pod "rabbitmq-server-0" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2") : configmap "rabbitmq-config-data" not found Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.021508 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.028305 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.054049 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.065347 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.078028 5003 generic.go:334] "Generic (PLEG): container finished" podID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerID="07b2e1874e1e6350f660adafaf47ead2fb632e268672667911eded298b5a3565" exitCode=0 Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.078083 5003 generic.go:334] "Generic (PLEG): container finished" podID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerID="c595cd857ef9b98c086bda781fccff343323ff20c25f68b8460e16c62cecb388" exitCode=2 Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.078256 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.078310 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="ff44c8db-792b-491a-879a-7e1ae7717a0f" containerName="nova-cell1-conductor-conductor" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.078610 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-788e-account-create-update-p27qs"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.078652 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42","Type":"ContainerDied","Data":"07b2e1874e1e6350f660adafaf47ead2fb632e268672667911eded298b5a3565"} Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.078687 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42","Type":"ContainerDied","Data":"c595cd857ef9b98c086bda781fccff343323ff20c25f68b8460e16c62cecb388"} Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.089515 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-lq5s5"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.102192 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-frvgd"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.107049 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-lq5s5"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.122347 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-frvgd"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.122656 5003 generic.go:334] "Generic (PLEG): container finished" podID="890e99fd-959d-4946-9716-acfe78278964" containerID="5c16e890258cb037b15eed8bde5425015cdc0c2f4dd920723e36c9b49ced1ff0" exitCode=0 Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.122754 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-novncproxy-0" event={"ID":"890e99fd-959d-4946-9716-acfe78278964","Type":"ContainerDied","Data":"5c16e890258cb037b15eed8bde5425015cdc0c2f4dd920723e36c9b49ced1ff0"} Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.124648 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wl4jq\" (UniqueName: \"kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-kube-api-access-wl4jq\") pod \"afad0966-8385-444b-9eed-8418c0a49b2a\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.124723 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-etc-swift\") pod \"afad0966-8385-444b-9eed-8418c0a49b2a\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.124795 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-public-tls-certs\") pod \"afad0966-8385-444b-9eed-8418c0a49b2a\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.124872 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-log-httpd\") pod \"afad0966-8385-444b-9eed-8418c0a49b2a\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.124896 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-config-data\") pod \"afad0966-8385-444b-9eed-8418c0a49b2a\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.124927 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-combined-ca-bundle\") pod \"afad0966-8385-444b-9eed-8418c0a49b2a\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.125044 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-internal-tls-certs\") pod \"afad0966-8385-444b-9eed-8418c0a49b2a\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.125136 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-run-httpd\") pod \"afad0966-8385-444b-9eed-8418c0a49b2a\" (UID: \"afad0966-8385-444b-9eed-8418c0a49b2a\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.125429 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-operator-scripts\") pod \"keystone-788e-account-create-update-p27qs\" (UID: \"5fb2bb50-645d-4cbc-973a-0f5c1847ea46\") " pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.125580 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tptlx\" (UniqueName: \"kubernetes.io/projected/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-kube-api-access-tptlx\") pod \"keystone-788e-account-create-update-p27qs\" (UID: \"5fb2bb50-645d-4cbc-973a-0f5c1847ea46\") " pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.126314 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "afad0966-8385-444b-9eed-8418c0a49b2a" (UID: "afad0966-8385-444b-9eed-8418c0a49b2a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.143953 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "afad0966-8385-444b-9eed-8418c0a49b2a" (UID: "afad0966-8385-444b-9eed-8418c0a49b2a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.147316 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-kube-api-access-wl4jq" (OuterVolumeSpecName: "kube-api-access-wl4jq") pod "afad0966-8385-444b-9eed-8418c0a49b2a" (UID: "afad0966-8385-444b-9eed-8418c0a49b2a"). InnerVolumeSpecName "kube-api-access-wl4jq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.149081 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-85dcb76789-v5z7d"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.149304 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-85dcb76789-v5z7d" podUID="597b6841-5a72-4d8d-b2a6-dec279d628d0" containerName="keystone-api" containerID="cri-o://f6abe0f83c7bb707281a5925c06f33a9019ba1df8b444b347dc26031613596ff" gracePeriod=30 Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.149416 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "afad0966-8385-444b-9eed-8418c0a49b2a" (UID: "afad0966-8385-444b-9eed-8418c0a49b2a"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.165042 5003 generic.go:334] "Generic (PLEG): container finished" podID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" containerID="7b0afbed514d598037465207941500c8d671c3e849c508673ddb681f2032cb98" exitCode=0 Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.165161 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa","Type":"ContainerDied","Data":"7b0afbed514d598037465207941500c8d671c3e849c508673ddb681f2032cb98"} Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.230417 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tptlx\" (UniqueName: \"kubernetes.io/projected/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-kube-api-access-tptlx\") pod \"keystone-788e-account-create-update-p27qs\" (UID: \"5fb2bb50-645d-4cbc-973a-0f5c1847ea46\") " pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.230518 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-operator-scripts\") pod \"keystone-788e-account-create-update-p27qs\" (UID: \"5fb2bb50-645d-4cbc-973a-0f5c1847ea46\") " pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.230613 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.230631 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/afad0966-8385-444b-9eed-8418c0a49b2a-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.230642 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wl4jq\" (UniqueName: \"kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-kube-api-access-wl4jq\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.230656 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/afad0966-8385-444b-9eed-8418c0a49b2a-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.230733 5003 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.230791 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-operator-scripts podName:5fb2bb50-645d-4cbc-973a-0f5c1847ea46 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:24.730772464 +0000 UTC m=+1520.203802305 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-operator-scripts") pod "keystone-788e-account-create-update-p27qs" (UID: "5fb2bb50-645d-4cbc-973a-0f5c1847ea46") : configmap "openstack-scripts" not found Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.246696 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f14-account-create-update-xsz5w" event={"ID":"c304f0e5-d232-4549-ab8f-f11f5008f903","Type":"ContainerStarted","Data":"aaa8c43505e9d0add544ba10a29ea47974d00beb2a4add48ce91d96914b38c40"} Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.248340 5003 projected.go:194] Error preparing data for projected volume kube-api-access-tptlx for pod openstack/keystone-788e-account-create-update-p27qs: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.248432 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-kube-api-access-tptlx podName:5fb2bb50-645d-4cbc-973a-0f5c1847ea46 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:24.748405335 +0000 UTC m=+1520.221435176 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-tptlx" (UniqueName: "kubernetes.io/projected/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-kube-api-access-tptlx") pod "keystone-788e-account-create-update-p27qs" (UID: "5fb2bb50-645d-4cbc-973a-0f5c1847ea46") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.260255 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.269001 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6885ccbc9-jntqm" event={"ID":"afad0966-8385-444b-9eed-8418c0a49b2a","Type":"ContainerDied","Data":"ea536b2079f8710d9f7de8266986bc2d261d9e8bd785f2f18c978054d29b2b31"} Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.269248 5003 scope.go:117] "RemoveContainer" containerID="7f17c88fe39230076ac013b10c5ff4f44f766a32bb39a15570523798fd0cee22" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.269435 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6885ccbc9-jntqm" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.295483 5003 generic.go:334] "Generic (PLEG): container finished" podID="9f2d1a12-b4ce-48c2-88b0-5ae881760963" containerID="fa64a3fdaea3987586e9a4f1f0d99812e75c6f392e9589b1d61e5b244d0755f5" exitCode=1 Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.295586 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-kn44b" event={"ID":"9f2d1a12-b4ce-48c2-88b0-5ae881760963","Type":"ContainerDied","Data":"fa64a3fdaea3987586e9a4f1f0d99812e75c6f392e9589b1d61e5b244d0755f5"} Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.296611 5003 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/root-account-create-update-kn44b" secret="" err="secret \"galera-openstack-dockercfg-v7gwm\" not found" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.296704 5003 scope.go:117] "RemoveContainer" containerID="fa64a3fdaea3987586e9a4f1f0d99812e75c6f392e9589b1d61e5b244d0755f5" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.313930 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/placement-5569cb5574-jt6r6" podUID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.0.153:8778/\": read tcp 10.217.0.2:34064->10.217.0.153:8778: read: connection reset by peer" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.314222 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/placement-5569cb5574-jt6r6" podUID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.0.153:8778/\": read tcp 10.217.0.2:34062->10.217.0.153:8778: read: connection reset by peer" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.315567 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "afad0966-8385-444b-9eed-8418c0a49b2a" (UID: "afad0966-8385-444b-9eed-8418c0a49b2a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.322337 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "afad0966-8385-444b-9eed-8418c0a49b2a" (UID: "afad0966-8385-444b-9eed-8418c0a49b2a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.332509 5003 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.332536 5003 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.337265 5003 generic.go:334] "Generic (PLEG): container finished" podID="90ff3d24-4688-4c94-b5d6-c408e74c28ae" containerID="db90af2bbb86e3f491229ad487c735393946380d939a9f6fd713ad64285edc75" exitCode=2 Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.337862 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"90ff3d24-4688-4c94-b5d6-c408e74c28ae","Type":"ContainerDied","Data":"db90af2bbb86e3f491229ad487c735393946380d939a9f6fd713ad64285edc75"} Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.353449 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "afad0966-8385-444b-9eed-8418c0a49b2a" (UID: "afad0966-8385-444b-9eed-8418c0a49b2a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.364777 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-xfd9f"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.376791 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-788e-account-create-update-p27qs"] Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.377983 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-tptlx operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystone-788e-account-create-update-p27qs" podUID="5fb2bb50-645d-4cbc-973a-0f5c1847ea46" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.384110 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-xfd9f"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.391193 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-kn44b"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.430796 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-config-data" (OuterVolumeSpecName: "config-data") pod "afad0966-8385-444b-9eed-8418c0a49b2a" (UID: "afad0966-8385-444b-9eed-8418c0a49b2a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.440579 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.440623 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afad0966-8385-444b-9eed-8418c0a49b2a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.441718 5003 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.441787 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts podName:9f2d1a12-b4ce-48c2-88b0-5ae881760963 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:24.941766149 +0000 UTC m=+1520.414795990 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts") pod "root-account-create-update-kn44b" (UID: "9f2d1a12-b4ce-48c2-88b0-5ae881760963") : configmap "openstack-scripts" not found Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.456572 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.479763 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.541663 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.541779 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-combined-ca-bundle\") pod \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.541819 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-combined-ca-bundle\") pod \"890e99fd-959d-4946-9716-acfe78278964\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.541861 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-default\") pod \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.541917 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-generated\") pod \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.541961 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-config-data\") pod \"890e99fd-959d-4946-9716-acfe78278964\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.542144 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-70b4-account-create-update-gwtlx" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.543168 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" (UID: "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.543623 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" (UID: "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.543890 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpdr6\" (UniqueName: \"kubernetes.io/projected/890e99fd-959d-4946-9716-acfe78278964-kube-api-access-dpdr6\") pod \"890e99fd-959d-4946-9716-acfe78278964\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.543940 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-operator-scripts\") pod \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.543979 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-nova-novncproxy-tls-certs\") pod \"890e99fd-959d-4946-9716-acfe78278964\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.544057 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-galera-tls-certs\") pod \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.544115 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kolla-config\") pod \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.544208 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npvx4\" (UniqueName: \"kubernetes.io/projected/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kube-api-access-npvx4\") pod \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\" (UID: \"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.544237 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-vencrypt-tls-certs\") pod \"890e99fd-959d-4946-9716-acfe78278964\" (UID: \"890e99fd-959d-4946-9716-acfe78278964\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.544939 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.544956 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.555334 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" (UID: "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.572814 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" (UID: "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.581316 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/890e99fd-959d-4946-9716-acfe78278964-kube-api-access-dpdr6" (OuterVolumeSpecName: "kube-api-access-dpdr6") pod "890e99fd-959d-4946-9716-acfe78278964" (UID: "890e99fd-959d-4946-9716-acfe78278964"). InnerVolumeSpecName "kube-api-access-dpdr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.600837 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kube-api-access-npvx4" (OuterVolumeSpecName: "kube-api-access-npvx4") pod "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" (UID: "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa"). InnerVolumeSpecName "kube-api-access-npvx4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.605719 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "mysql-db") pod "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" (UID: "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.615478 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="714823a9-560a-496c-b975-2db1099ad873" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.178:9292/healthcheck\": read tcp 10.217.0.2:34236->10.217.0.178:9292: read: connection reset by peer" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.615876 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="714823a9-560a-496c-b975-2db1099ad873" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.178:9292/healthcheck\": read tcp 10.217.0.2:34248->10.217.0.178:9292: read: connection reset by peer" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.639741 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-config-data" (OuterVolumeSpecName: "config-data") pod "890e99fd-959d-4946-9716-acfe78278964" (UID: "890e99fd-959d-4946-9716-acfe78278964"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.640052 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "890e99fd-959d-4946-9716-acfe78278964" (UID: "890e99fd-959d-4946-9716-acfe78278964"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.645177 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" (UID: "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.645988 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-operator-scripts\") pod \"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7\" (UID: \"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7\") " Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.646005 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4 is running failed: container process not found" containerID="716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.646123 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wblkz\" (UniqueName: \"kubernetes.io/projected/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-kube-api-access-wblkz\") pod \"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7\" (UID: \"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.646739 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7" (UID: "2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.646786 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.647130 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.647201 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.647266 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpdr6\" (UniqueName: \"kubernetes.io/projected/890e99fd-959d-4946-9716-acfe78278964-kube-api-access-dpdr6\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.647322 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.647485 5003 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.647561 5003 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.647619 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npvx4\" (UniqueName: \"kubernetes.io/projected/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-kube-api-access-npvx4\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.647501 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" (UID: "e9f73829-d0a1-4e4d-8f5a-755d63ce1caa"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.647127 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4 is running failed: container process not found" containerID="716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.647339 5003 scope.go:117] "RemoveContainer" containerID="6615326903f6990ecae76e7fe834c4ecc3fe23051337c2d5af2308f338730040" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.648378 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4 is running failed: container process not found" containerID="716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.648447 5003 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="28122a6a-8b54-4ff3-9092-a1f7439a35cf" containerName="nova-cell0-conductor-conductor" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.653638 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-kube-api-access-wblkz" (OuterVolumeSpecName: "kube-api-access-wblkz") pod "2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7" (UID: "2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7"). InnerVolumeSpecName "kube-api-access-wblkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.662935 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "890e99fd-959d-4946-9716-acfe78278964" (UID: "890e99fd-959d-4946-9716-acfe78278964"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.671770 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" containerName="galera" containerID="cri-o://c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368" gracePeriod=30 Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.706705 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.710242 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "890e99fd-959d-4946-9716-acfe78278964" (UID: "890e99fd-959d-4946-9716-acfe78278964"). InnerVolumeSpecName "nova-novncproxy-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.750082 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tptlx\" (UniqueName: \"kubernetes.io/projected/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-kube-api-access-tptlx\") pod \"keystone-788e-account-create-update-p27qs\" (UID: \"5fb2bb50-645d-4cbc-973a-0f5c1847ea46\") " pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.750177 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-operator-scripts\") pod \"keystone-788e-account-create-update-p27qs\" (UID: \"5fb2bb50-645d-4cbc-973a-0f5c1847ea46\") " pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.750341 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.750354 5003 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.750368 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wblkz\" (UniqueName: \"kubernetes.io/projected/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7-kube-api-access-wblkz\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.750377 5003 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/890e99fd-959d-4946-9716-acfe78278964-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.750386 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.750396 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.750469 5003 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.750536 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-operator-scripts podName:5fb2bb50-645d-4cbc-973a-0f5c1847ea46 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:25.750520208 +0000 UTC m=+1521.223550039 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-operator-scripts") pod "keystone-788e-account-create-update-p27qs" (UID: "5fb2bb50-645d-4cbc-973a-0f5c1847ea46") : configmap "openstack-scripts" not found Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.754788 5003 projected.go:194] Error preparing data for projected volume kube-api-access-tptlx for pod openstack/keystone-788e-account-create-update-p27qs: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.754939 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-kube-api-access-tptlx podName:5fb2bb50-645d-4cbc-973a-0f5c1847ea46 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:25.754918503 +0000 UTC m=+1521.227948344 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-tptlx" (UniqueName: "kubernetes.io/projected/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-kube-api-access-tptlx") pod "keystone-788e-account-create-update-p27qs" (UID: "5fb2bb50-645d-4cbc-973a-0f5c1847ea46") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.793768 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.851761 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-combined-ca-bundle\") pod \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.851836 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-config\") pod \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.851874 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zmjl\" (UniqueName: \"kubernetes.io/projected/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-api-access-5zmjl\") pod \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.851981 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-certs\") pod \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\" (UID: \"90ff3d24-4688-4c94-b5d6-c408e74c28ae\") " Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.862897 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09e11bbd-46d5-4fa2-97f1-75b8cdd3b263" path="/var/lib/kubelet/pods/09e11bbd-46d5-4fa2-97f1-75b8cdd3b263/volumes" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.863941 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="226781be-ff54-487b-9180-abaf7d0eda00" path="/var/lib/kubelet/pods/226781be-ff54-487b-9180-abaf7d0eda00/volumes" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.865071 5003 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="3121d3df-082a-434a-8bad-4dce3e7d9b09" path="/var/lib/kubelet/pods/3121d3df-082a-434a-8bad-4dce3e7d9b09/volumes" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.865862 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50703d14-b53d-4ef3-8b2a-790b55e0c5d1" path="/var/lib/kubelet/pods/50703d14-b53d-4ef3-8b2a-790b55e0c5d1/volumes" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.867441 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67adf34a-962a-435b-8e35-ae1387c097b6" path="/var/lib/kubelet/pods/67adf34a-962a-435b-8e35-ae1387c097b6/volumes" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.868276 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="741d1d6f-4c11-4352-ba73-c8a13a465c95" path="/var/lib/kubelet/pods/741d1d6f-4c11-4352-ba73-c8a13a465c95/volumes" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.869485 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="988b3ee2-147b-4ebc-9d31-42bdaf144bc5" path="/var/lib/kubelet/pods/988b3ee2-147b-4ebc-9d31-42bdaf144bc5/volumes" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.870704 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5c96797-3987-489a-8ce2-510caa11262c" path="/var/lib/kubelet/pods/c5c96797-3987-489a-8ce2-510caa11262c/volumes" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.871831 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c88e1443-25c4-4e67-83d0-e43cef2b2e5c" path="/var/lib/kubelet/pods/c88e1443-25c4-4e67-83d0-e43cef2b2e5c/volumes" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.877761 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-6885ccbc9-jntqm"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.877936 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-6885ccbc9-jntqm"] Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.905382 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-api-access-5zmjl" (OuterVolumeSpecName: "kube-api-access-5zmjl") pod "90ff3d24-4688-4c94-b5d6-c408e74c28ae" (UID: "90ff3d24-4688-4c94-b5d6-c408e74c28ae"). InnerVolumeSpecName "kube-api-access-5zmjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:24 crc kubenswrapper[5003]: I0104 12:13:24.954384 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zmjl\" (UniqueName: \"kubernetes.io/projected/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-api-access-5zmjl\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.954477 5003 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 04 12:13:24 crc kubenswrapper[5003]: E0104 12:13:24.954526 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts podName:9f2d1a12-b4ce-48c2-88b0-5ae881760963 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:25.95451086 +0000 UTC m=+1521.427540701 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts") pod "root-account-create-update-kn44b" (UID: "9f2d1a12-b4ce-48c2-88b0-5ae881760963") : configmap "openstack-scripts" not found Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.005197 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "90ff3d24-4688-4c94-b5d6-c408e74c28ae" (UID: "90ff3d24-4688-4c94-b5d6-c408e74c28ae"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.009687 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "90ff3d24-4688-4c94-b5d6-c408e74c28ae" (UID: "90ff3d24-4688-4c94-b5d6-c408e74c28ae"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.013464 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-66fdccd748-zk2qt" podUID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.166:9311/healthcheck\": read tcp 10.217.0.2:57632->10.217.0.166:9311: read: connection reset by peer" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.013754 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5f14-account-create-update-xsz5w" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.013983 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-66fdccd748-zk2qt" podUID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.166:9311/healthcheck\": read tcp 10.217.0.2:57634->10.217.0.166:9311: read: connection reset by peer" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.059672 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c304f0e5-d232-4549-ab8f-f11f5008f903-operator-scripts\") pod \"c304f0e5-d232-4549-ab8f-f11f5008f903\" (UID: \"c304f0e5-d232-4549-ab8f-f11f5008f903\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.059917 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmjcr\" (UniqueName: \"kubernetes.io/projected/c304f0e5-d232-4549-ab8f-f11f5008f903-kube-api-access-jmjcr\") pod \"c304f0e5-d232-4549-ab8f-f11f5008f903\" (UID: \"c304f0e5-d232-4549-ab8f-f11f5008f903\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.060373 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c304f0e5-d232-4549-ab8f-f11f5008f903-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c304f0e5-d232-4549-ab8f-f11f5008f903" (UID: "c304f0e5-d232-4549-ab8f-f11f5008f903"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.060676 5003 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.060698 5003 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.060712 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c304f0e5-d232-4549-ab8f-f11f5008f903-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.063926 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "90ff3d24-4688-4c94-b5d6-c408e74c28ae" (UID: "90ff3d24-4688-4c94-b5d6-c408e74c28ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.068894 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c304f0e5-d232-4549-ab8f-f11f5008f903-kube-api-access-jmjcr" (OuterVolumeSpecName: "kube-api-access-jmjcr") pod "c304f0e5-d232-4549-ab8f-f11f5008f903" (UID: "c304f0e5-d232-4549-ab8f-f11f5008f903"). InnerVolumeSpecName "kube-api-access-jmjcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.102731 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-553f-account-create-update-kjhcr" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.103966 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-daa1-account-create-update-m44v2" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.162365 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fba35580-43f2-4d96-8c52-6da2b5fdbd94-operator-scripts\") pod \"fba35580-43f2-4d96-8c52-6da2b5fdbd94\" (UID: \"fba35580-43f2-4d96-8c52-6da2b5fdbd94\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.162419 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwfgh\" (UniqueName: \"kubernetes.io/projected/28114f85-1d9b-4e71-be5f-d721f06c70dc-kube-api-access-pwfgh\") pod \"28114f85-1d9b-4e71-be5f-d721f06c70dc\" (UID: \"28114f85-1d9b-4e71-be5f-d721f06c70dc\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.162547 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28114f85-1d9b-4e71-be5f-d721f06c70dc-operator-scripts\") pod \"28114f85-1d9b-4e71-be5f-d721f06c70dc\" (UID: \"28114f85-1d9b-4e71-be5f-d721f06c70dc\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.162611 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmxdk\" (UniqueName: \"kubernetes.io/projected/fba35580-43f2-4d96-8c52-6da2b5fdbd94-kube-api-access-nmxdk\") pod \"fba35580-43f2-4d96-8c52-6da2b5fdbd94\" (UID: \"fba35580-43f2-4d96-8c52-6da2b5fdbd94\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.163007 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90ff3d24-4688-4c94-b5d6-c408e74c28ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.163042 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmjcr\" (UniqueName: \"kubernetes.io/projected/c304f0e5-d232-4549-ab8f-f11f5008f903-kube-api-access-jmjcr\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.163812 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28114f85-1d9b-4e71-be5f-d721f06c70dc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "28114f85-1d9b-4e71-be5f-d721f06c70dc" (UID: "28114f85-1d9b-4e71-be5f-d721f06c70dc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.163884 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fba35580-43f2-4d96-8c52-6da2b5fdbd94-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fba35580-43f2-4d96-8c52-6da2b5fdbd94" (UID: "fba35580-43f2-4d96-8c52-6da2b5fdbd94"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.167347 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fba35580-43f2-4d96-8c52-6da2b5fdbd94-kube-api-access-nmxdk" (OuterVolumeSpecName: "kube-api-access-nmxdk") pod "fba35580-43f2-4d96-8c52-6da2b5fdbd94" (UID: "fba35580-43f2-4d96-8c52-6da2b5fdbd94"). InnerVolumeSpecName "kube-api-access-nmxdk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.167430 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28114f85-1d9b-4e71-be5f-d721f06c70dc-kube-api-access-pwfgh" (OuterVolumeSpecName: "kube-api-access-pwfgh") pod "28114f85-1d9b-4e71-be5f-d721f06c70dc" (UID: "28114f85-1d9b-4e71-be5f-d721f06c70dc"). InnerVolumeSpecName "kube-api-access-pwfgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: E0104 12:13:25.167746 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3e03c9994a07fa7e18a42bd5c17e591979717aa51ecb5df6f1ce553e5a854fff" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 04 12:13:25 crc kubenswrapper[5003]: E0104 12:13:25.175753 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3e03c9994a07fa7e18a42bd5c17e591979717aa51ecb5df6f1ce553e5a854fff" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 04 12:13:25 crc kubenswrapper[5003]: E0104 12:13:25.194461 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3e03c9994a07fa7e18a42bd5c17e591979717aa51ecb5df6f1ce553e5a854fff" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 04 12:13:25 crc kubenswrapper[5003]: E0104 12:13:25.194566 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerName="ovn-northd" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.267717 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwfgh\" (UniqueName: \"kubernetes.io/projected/28114f85-1d9b-4e71-be5f-d721f06c70dc-kube-api-access-pwfgh\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.267763 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/28114f85-1d9b-4e71-be5f-d721f06c70dc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.267774 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmxdk\" (UniqueName: \"kubernetes.io/projected/fba35580-43f2-4d96-8c52-6da2b5fdbd94-kube-api-access-nmxdk\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.267783 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fba35580-43f2-4d96-8c52-6da2b5fdbd94-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.354665 5003 generic.go:334] "Generic (PLEG): container finished" podID="43c1199f-e162-4062-a972-417afa58eaa6" containerID="193d03f2dc8b6a55e957d530e54441551f102796455d35a76dcd721cbba41982" exitCode=0 Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.354721 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-internal-api-0" event={"ID":"43c1199f-e162-4062-a972-417afa58eaa6","Type":"ContainerDied","Data":"193d03f2dc8b6a55e957d530e54441551f102796455d35a76dcd721cbba41982"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.358518 5003 generic.go:334] "Generic (PLEG): container finished" podID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerID="e10ea013e6db1ec608b48d073081c9a1e3b0565542f8409e8438d07885d6f975" exitCode=0 Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.358586 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5569cb5574-jt6r6" event={"ID":"0ebba05f-e935-404f-85c0-4bd28a6afd28","Type":"ContainerDied","Data":"e10ea013e6db1ec608b48d073081c9a1e3b0565542f8409e8438d07885d6f975"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.358780 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5569cb5574-jt6r6" event={"ID":"0ebba05f-e935-404f-85c0-4bd28a6afd28","Type":"ContainerDied","Data":"23e4d9f262f1f06ccd229700f7fd9eccf5dd9ad583a531de69d8baaa59558f7e"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.358851 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23e4d9f262f1f06ccd229700f7fd9eccf5dd9ad583a531de69d8baaa59558f7e" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.361279 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"e9f73829-d0a1-4e4d-8f5a-755d63ce1caa","Type":"ContainerDied","Data":"3b572644d222a01c860c65908d6859ffdf8d098dfbaafcc99069c8317edcd524"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.361343 5003 scope.go:117] "RemoveContainer" containerID="7b0afbed514d598037465207941500c8d671c3e849c508673ddb681f2032cb98" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.361461 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.363905 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5f14-account-create-update-xsz5w" event={"ID":"c304f0e5-d232-4549-ab8f-f11f5008f903","Type":"ContainerDied","Data":"aaa8c43505e9d0add544ba10a29ea47974d00beb2a4add48ce91d96914b38c40"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.364069 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5f14-account-create-update-xsz5w" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.367519 5003 generic.go:334] "Generic (PLEG): container finished" podID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerID="2d1f1d056b61194569686724f2003dba8a8e5a0d07b9d3c97b835d48ef2f3c01" exitCode=0 Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.367578 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42","Type":"ContainerDied","Data":"2d1f1d056b61194569686724f2003dba8a8e5a0d07b9d3c97b835d48ef2f3c01"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.371772 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2c899132-ee49-4ab3-89ea-95f0bfcb71ab","Type":"ContainerDied","Data":"46919056bf4b2db61c4b851726637476ed5f878bddbce873c5a30520c666773e"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.371757 5003 generic.go:334] "Generic (PLEG): container finished" podID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" containerID="46919056bf4b2db61c4b851726637476ed5f878bddbce873c5a30520c666773e" exitCode=0 Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.378736 5003 generic.go:334] "Generic (PLEG): container finished" podID="274fbbf3-b927-408e-9594-946f6ea71638" containerID="7141902b674c0aba97218ef22d6317c2792d0dbefb479c305e2f864785706754" exitCode=0 Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.378835 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"274fbbf3-b927-408e-9594-946f6ea71638","Type":"ContainerDied","Data":"7141902b674c0aba97218ef22d6317c2792d0dbefb479c305e2f864785706754"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.378857 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"274fbbf3-b927-408e-9594-946f6ea71638","Type":"ContainerDied","Data":"dae14417523a45eccdb53cb7dc7ce6b65dc4fc384cb328271d9f37fc0ac3aefd"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.378869 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dae14417523a45eccdb53cb7dc7ce6b65dc4fc384cb328271d9f37fc0ac3aefd" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.381297 5003 generic.go:334] "Generic (PLEG): container finished" podID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerID="dd895a8362663704ae95aaa9cadf69c10b404ecae29d602906e53e472c16265f" exitCode=0 Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.381325 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66fdccd748-zk2qt" event={"ID":"b53805c6-4e15-4580-a60d-1f0c9c1fcef6","Type":"ContainerDied","Data":"dd895a8362663704ae95aaa9cadf69c10b404ecae29d602906e53e472c16265f"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.383852 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-70b4-account-create-update-gwtlx" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.383875 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-70b4-account-create-update-gwtlx" event={"ID":"2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7","Type":"ContainerDied","Data":"9433b5ede5eb6c7e4918ba2202d065b8cf4e5b7dc8b9c96f7bacc4e830095f73"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.387975 5003 generic.go:334] "Generic (PLEG): container finished" podID="714823a9-560a-496c-b975-2db1099ad873" containerID="5b23433759eb714853942994a37054e568cfd11f999d8e6d3f6f86c33c3787c4" exitCode=0 Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.388064 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"714823a9-560a-496c-b975-2db1099ad873","Type":"ContainerDied","Data":"5b23433759eb714853942994a37054e568cfd11f999d8e6d3f6f86c33c3787c4"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.390625 5003 generic.go:334] "Generic (PLEG): container finished" podID="28122a6a-8b54-4ff3-9092-a1f7439a35cf" containerID="716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4" exitCode=0 Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.390672 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"28122a6a-8b54-4ff3-9092-a1f7439a35cf","Type":"ContainerDied","Data":"716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.392176 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-daa1-account-create-update-m44v2" event={"ID":"fba35580-43f2-4d96-8c52-6da2b5fdbd94","Type":"ContainerDied","Data":"0a9e72a8af83b11d4f125c9da398452d30946fab5d7d128c67796709ab93ff32"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.392246 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-daa1-account-create-update-m44v2" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.401731 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"890e99fd-959d-4946-9716-acfe78278964","Type":"ContainerDied","Data":"b5ed1b408c59b4ef15d5def5aa2df873918de3c2767b9c6391eb16944810d1fc"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.401782 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.404040 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-553f-account-create-update-kjhcr" event={"ID":"28114f85-1d9b-4e71-be5f-d721f06c70dc","Type":"ContainerDied","Data":"9c9a7db0826e756a4e32f1b6a7e9ea4e85bc952ced92dd7c8d3f8d8180631009"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.404215 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-553f-account-create-update-kjhcr" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.421833 5003 generic.go:334] "Generic (PLEG): container finished" podID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerID="0251462325e49cea189f787676ef9b788f1da3cfefece6459ad475f7a74cd5c1" exitCode=0 Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.421919 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc","Type":"ContainerDied","Data":"0251462325e49cea189f787676ef9b788f1da3cfefece6459ad475f7a74cd5c1"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.425104 5003 generic.go:334] "Generic (PLEG): container finished" podID="9f2d1a12-b4ce-48c2-88b0-5ae881760963" containerID="69ed4feade45a6e74281effee1a4d4fac2fb5e3c202ed2e5dd93757020739ae2" exitCode=1 Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.425184 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-kn44b" event={"ID":"9f2d1a12-b4ce-48c2-88b0-5ae881760963","Type":"ContainerDied","Data":"69ed4feade45a6e74281effee1a4d4fac2fb5e3c202ed2e5dd93757020739ae2"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.433968 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"90ff3d24-4688-4c94-b5d6-c408e74c28ae","Type":"ContainerDied","Data":"df735e508babebf9dfbafd295f4dda02b9dba20e9b3495a9eb9324e89907ed40"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.433996 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.437428 5003 generic.go:334] "Generic (PLEG): container finished" podID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerID="88cf151cf804256f12f3d9496a2faf5cc39d58a9955cca558c56b93f8e9f6281" exitCode=0 Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.437548 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7","Type":"ContainerDied","Data":"88cf151cf804256f12f3d9496a2faf5cc39d58a9955cca558c56b93f8e9f6281"} Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.437582 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.485183 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.492694 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.518224 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.532371 5003 scope.go:117] "RemoveContainer" containerID="2a1ec64e5b0507e28d2e97d71cd1cc575cef24e7a7cb8c96be6fd1e98aabac73" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.579538 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-public-tls-certs\") pod \"0ebba05f-e935-404f-85c0-4bd28a6afd28\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.579597 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-internal-tls-certs\") pod \"0ebba05f-e935-404f-85c0-4bd28a6afd28\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.579667 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-config-data\") pod \"274fbbf3-b927-408e-9594-946f6ea71638\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.579694 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmgvl\" (UniqueName: \"kubernetes.io/projected/274fbbf3-b927-408e-9594-946f6ea71638-kube-api-access-dmgvl\") pod \"274fbbf3-b927-408e-9594-946f6ea71638\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.579724 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-combined-ca-bundle\") pod \"274fbbf3-b927-408e-9594-946f6ea71638\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.579774 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-kolla-config\") pod \"274fbbf3-b927-408e-9594-946f6ea71638\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.579803 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-combined-ca-bundle\") pod \"0ebba05f-e935-404f-85c0-4bd28a6afd28\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.579833 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-memcached-tls-certs\") pod \"274fbbf3-b927-408e-9594-946f6ea71638\" (UID: \"274fbbf3-b927-408e-9594-946f6ea71638\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.579901 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zz46\" (UniqueName: \"kubernetes.io/projected/0ebba05f-e935-404f-85c0-4bd28a6afd28-kube-api-access-4zz46\") pod \"0ebba05f-e935-404f-85c0-4bd28a6afd28\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.579984 
5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-config-data\") pod \"0ebba05f-e935-404f-85c0-4bd28a6afd28\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.580035 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-scripts\") pod \"0ebba05f-e935-404f-85c0-4bd28a6afd28\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.580092 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ebba05f-e935-404f-85c0-4bd28a6afd28-logs\") pod \"0ebba05f-e935-404f-85c0-4bd28a6afd28\" (UID: \"0ebba05f-e935-404f-85c0-4bd28a6afd28\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.581100 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ebba05f-e935-404f-85c0-4bd28a6afd28-logs" (OuterVolumeSpecName: "logs") pod "0ebba05f-e935-404f-85c0-4bd28a6afd28" (UID: "0ebba05f-e935-404f-85c0-4bd28a6afd28"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.582058 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-config-data" (OuterVolumeSpecName: "config-data") pod "274fbbf3-b927-408e-9594-946f6ea71638" (UID: "274fbbf3-b927-408e-9594-946f6ea71638"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.582094 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "274fbbf3-b927-408e-9594-946f6ea71638" (UID: "274fbbf3-b927-408e-9594-946f6ea71638"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.586627 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/274fbbf3-b927-408e-9594-946f6ea71638-kube-api-access-dmgvl" (OuterVolumeSpecName: "kube-api-access-dmgvl") pod "274fbbf3-b927-408e-9594-946f6ea71638" (UID: "274fbbf3-b927-408e-9594-946f6ea71638"). InnerVolumeSpecName "kube-api-access-dmgvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.591551 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.608573 5003 scope.go:117] "RemoveContainer" containerID="5c16e890258cb037b15eed8bde5425015cdc0c2f4dd920723e36c9b49ced1ff0" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.624546 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-scripts" (OuterVolumeSpecName: "scripts") pod "0ebba05f-e935-404f-85c0-4bd28a6afd28" (UID: "0ebba05f-e935-404f-85c0-4bd28a6afd28"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.633340 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ebba05f-e935-404f-85c0-4bd28a6afd28-kube-api-access-4zz46" (OuterVolumeSpecName: "kube-api-access-4zz46") pod "0ebba05f-e935-404f-85c0-4bd28a6afd28" (UID: "0ebba05f-e935-404f-85c0-4bd28a6afd28"). InnerVolumeSpecName "kube-api-access-4zz46". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.670387 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "274fbbf3-b927-408e-9594-946f6ea71638" (UID: "274fbbf3-b927-408e-9594-946f6ea71638"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.682928 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zz46\" (UniqueName: \"kubernetes.io/projected/0ebba05f-e935-404f-85c0-4bd28a6afd28-kube-api-access-4zz46\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.682971 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.682983 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0ebba05f-e935-404f-85c0-4bd28a6afd28-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.682995 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.683009 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmgvl\" (UniqueName: \"kubernetes.io/projected/274fbbf3-b927-408e-9594-946f6ea71638-kube-api-access-dmgvl\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.683058 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.683070 5003 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/274fbbf3-b927-408e-9594-946f6ea71638-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.686539 5003 scope.go:117] "RemoveContainer" containerID="fa64a3fdaea3987586e9a4f1f0d99812e75c6f392e9589b1d61e5b244d0755f5" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.703836 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.710836 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.749905 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5f14-account-create-update-xsz5w"] Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.751513 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-5f14-account-create-update-xsz5w"] Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.784859 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"43c1199f-e162-4062-a972-417afa58eaa6\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.784990 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdnk9\" (UniqueName: \"kubernetes.io/projected/43c1199f-e162-4062-a972-417afa58eaa6-kube-api-access-qdnk9\") pod \"43c1199f-e162-4062-a972-417afa58eaa6\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.785276 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-scripts\") pod \"43c1199f-e162-4062-a972-417afa58eaa6\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.785395 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-config-data\") pod \"43c1199f-e162-4062-a972-417afa58eaa6\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.785451 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-internal-tls-certs\") pod \"43c1199f-e162-4062-a972-417afa58eaa6\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.785629 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-httpd-run\") pod \"43c1199f-e162-4062-a972-417afa58eaa6\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.785693 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-combined-ca-bundle\") pod \"43c1199f-e162-4062-a972-417afa58eaa6\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.785850 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-logs\") pod \"43c1199f-e162-4062-a972-417afa58eaa6\" (UID: \"43c1199f-e162-4062-a972-417afa58eaa6\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.786731 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tptlx\" (UniqueName: \"kubernetes.io/projected/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-kube-api-access-tptlx\") pod \"keystone-788e-account-create-update-p27qs\" (UID: 
\"5fb2bb50-645d-4cbc-973a-0f5c1847ea46\") " pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.786866 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-operator-scripts\") pod \"keystone-788e-account-create-update-p27qs\" (UID: \"5fb2bb50-645d-4cbc-973a-0f5c1847ea46\") " pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:25 crc kubenswrapper[5003]: E0104 12:13:25.787508 5003 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 04 12:13:25 crc kubenswrapper[5003]: E0104 12:13:25.787786 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-operator-scripts podName:5fb2bb50-645d-4cbc-973a-0f5c1847ea46 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:27.787659184 +0000 UTC m=+1523.260689025 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-operator-scripts") pod "keystone-788e-account-create-update-p27qs" (UID: "5fb2bb50-645d-4cbc-973a-0f5c1847ea46") : configmap "openstack-scripts" not found Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.790594 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "43c1199f-e162-4062-a972-417afa58eaa6" (UID: "43c1199f-e162-4062-a972-417afa58eaa6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.791994 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "43c1199f-e162-4062-a972-417afa58eaa6" (UID: "43c1199f-e162-4062-a972-417afa58eaa6"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.799422 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-logs" (OuterVolumeSpecName: "logs") pod "43c1199f-e162-4062-a972-417afa58eaa6" (UID: "43c1199f-e162-4062-a972-417afa58eaa6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.800869 5003 scope.go:117] "RemoveContainer" containerID="db90af2bbb86e3f491229ad487c735393946380d939a9f6fd713ad64285edc75" Jan 04 12:13:25 crc kubenswrapper[5003]: E0104 12:13:25.808300 5003 projected.go:194] Error preparing data for projected volume kube-api-access-tptlx for pod openstack/keystone-788e-account-create-update-p27qs: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 04 12:13:25 crc kubenswrapper[5003]: E0104 12:13:25.808400 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-kube-api-access-tptlx podName:5fb2bb50-645d-4cbc-973a-0f5c1847ea46 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:27.808376016 +0000 UTC m=+1523.281405857 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-tptlx" (UniqueName: "kubernetes.io/projected/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-kube-api-access-tptlx") pod "keystone-788e-account-create-update-p27qs" (UID: "5fb2bb50-645d-4cbc-973a-0f5c1847ea46") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.824371 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43c1199f-e162-4062-a972-417afa58eaa6-kube-api-access-qdnk9" (OuterVolumeSpecName: "kube-api-access-qdnk9") pod "43c1199f-e162-4062-a972-417afa58eaa6" (UID: "43c1199f-e162-4062-a972-417afa58eaa6"). InnerVolumeSpecName "kube-api-access-qdnk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.825555 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.832173 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.844562 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-config-data" (OuterVolumeSpecName: "config-data") pod "0ebba05f-e935-404f-85c0-4bd28a6afd28" (UID: "0ebba05f-e935-404f-85c0-4bd28a6afd28"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.847309 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-scripts" (OuterVolumeSpecName: "scripts") pod "43c1199f-e162-4062-a972-417afa58eaa6" (UID: "43c1199f-e162-4062-a972-417afa58eaa6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.880812 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-70b4-account-create-update-gwtlx"] Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.888786 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-combined-ca-bundle\") pod \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.888898 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-combined-ca-bundle\") pod \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.888980 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-logs\") pod \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.889056 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-config-data\") pod \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.889080 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-internal-tls-certs\") pod \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.889151 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-public-tls-certs\") pod \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.889260 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-scripts\") pod \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.889330 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95w2v\" (UniqueName: \"kubernetes.io/projected/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-kube-api-access-95w2v\") pod \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.889385 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data-custom\") pod \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.889446 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-etc-machine-id\") pod \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.889493 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data\") pod \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.889571 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjm4f\" (UniqueName: \"kubernetes.io/projected/28122a6a-8b54-4ff3-9092-a1f7439a35cf-kube-api-access-rjm4f\") pod \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\" (UID: \"28122a6a-8b54-4ff3-9092-a1f7439a35cf\") " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.890062 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.890088 5003 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.890100 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/43c1199f-e162-4062-a972-417afa58eaa6-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.890128 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.890141 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdnk9\" (UniqueName: \"kubernetes.io/projected/43c1199f-e162-4062-a972-417afa58eaa6-kube-api-access-qdnk9\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.890156 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.891162 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-logs" (OuterVolumeSpecName: "logs") pod "2c899132-ee49-4ab3-89ea-95f0bfcb71ab" (UID: "2c899132-ee49-4ab3-89ea-95f0bfcb71ab"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.891605 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2c899132-ee49-4ab3-89ea-95f0bfcb71ab" (UID: "2c899132-ee49-4ab3-89ea-95f0bfcb71ab"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.897164 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-70b4-account-create-update-gwtlx"] Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.900222 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2c899132-ee49-4ab3-89ea-95f0bfcb71ab" (UID: "2c899132-ee49-4ab3-89ea-95f0bfcb71ab"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.902143 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.908130 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28122a6a-8b54-4ff3-9092-a1f7439a35cf-kube-api-access-rjm4f" (OuterVolumeSpecName: "kube-api-access-rjm4f") pod "28122a6a-8b54-4ff3-9092-a1f7439a35cf" (UID: "28122a6a-8b54-4ff3-9092-a1f7439a35cf"). InnerVolumeSpecName "kube-api-access-rjm4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.917574 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-scripts" (OuterVolumeSpecName: "scripts") pod "2c899132-ee49-4ab3-89ea-95f0bfcb71ab" (UID: "2c899132-ee49-4ab3-89ea-95f0bfcb71ab"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.919933 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-kube-api-access-95w2v" (OuterVolumeSpecName: "kube-api-access-95w2v") pod "2c899132-ee49-4ab3-89ea-95f0bfcb71ab" (UID: "2c899132-ee49-4ab3-89ea-95f0bfcb71ab"). InnerVolumeSpecName "kube-api-access-95w2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.922651 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.939977 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0ebba05f-e935-404f-85c0-4bd28a6afd28" (UID: "0ebba05f-e935-404f-85c0-4bd28a6afd28"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.940292 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.967734 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-daa1-account-create-update-m44v2"] Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.976528 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-daa1-account-create-update-m44v2"] Jan 04 12:13:25 crc kubenswrapper[5003]: I0104 12:13:25.990220 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.006837 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.010899 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95w2v\" (UniqueName: \"kubernetes.io/projected/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-kube-api-access-95w2v\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.010934 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.010965 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.010977 5003 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.010987 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.010997 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjm4f\" (UniqueName: \"kubernetes.io/projected/28122a6a-8b54-4ff3-9092-a1f7439a35cf-kube-api-access-rjm4f\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.011036 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.011046 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.011150 5003 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.011229 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts podName:9f2d1a12-b4ce-48c2-88b0-5ae881760963 nodeName:}" failed. 
No retries permitted until 2026-01-04 12:13:28.011209957 +0000 UTC m=+1523.484239798 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts") pod "root-account-create-update-kn44b" (UID: "9f2d1a12-b4ce-48c2-88b0-5ae881760963") : configmap "openstack-scripts" not found Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.035414 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43c1199f-e162-4062-a972-417afa58eaa6" (UID: "43c1199f-e162-4062-a972-417afa58eaa6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.059963 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-553f-account-create-update-kjhcr"] Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.066193 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0ebba05f-e935-404f-85c0-4bd28a6afd28" (UID: "0ebba05f-e935-404f-85c0-4bd28a6afd28"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.070232 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "274fbbf3-b927-408e-9594-946f6ea71638" (UID: "274fbbf3-b927-408e-9594-946f6ea71638"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.074316 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-553f-account-create-update-kjhcr"] Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.108510 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28122a6a-8b54-4ff3-9092-a1f7439a35cf" (UID: "28122a6a-8b54-4ff3-9092-a1f7439a35cf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.115405 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c899132-ee49-4ab3-89ea-95f0bfcb71ab" (UID: "2c899132-ee49-4ab3-89ea-95f0bfcb71ab"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.115605 5003 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/274fbbf3-b927-408e-9594-946f6ea71638-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.115632 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.115641 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.115651 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.115678 5003 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138074 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fjrtq"] Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138466 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afad0966-8385-444b-9eed-8418c0a49b2a" containerName="proxy-server" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138485 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="afad0966-8385-444b-9eed-8418c0a49b2a" containerName="proxy-server" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138502 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" containerName="mysql-bootstrap" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138509 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" containerName="mysql-bootstrap" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138517 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afad0966-8385-444b-9eed-8418c0a49b2a" containerName="proxy-httpd" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138522 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="afad0966-8385-444b-9eed-8418c0a49b2a" containerName="proxy-httpd" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138531 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" containerName="galera" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138537 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" containerName="galera" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138548 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28122a6a-8b54-4ff3-9092-a1f7439a35cf" containerName="nova-cell0-conductor-conductor" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138554 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="28122a6a-8b54-4ff3-9092-a1f7439a35cf" 
containerName="nova-cell0-conductor-conductor" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138566 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c1199f-e162-4062-a972-417afa58eaa6" containerName="glance-httpd" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138573 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c1199f-e162-4062-a972-417afa58eaa6" containerName="glance-httpd" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138585 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c1199f-e162-4062-a972-417afa58eaa6" containerName="glance-log" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138628 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c1199f-e162-4062-a972-417afa58eaa6" containerName="glance-log" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138615 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-config-data" (OuterVolumeSpecName: "config-data") pod "43c1199f-e162-4062-a972-417afa58eaa6" (UID: "43c1199f-e162-4062-a972-417afa58eaa6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138637 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerName="placement-log" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138664 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerName="placement-log" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138678 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerName="placement-api" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138686 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerName="placement-api" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138699 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90ff3d24-4688-4c94-b5d6-c408e74c28ae" containerName="kube-state-metrics" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138707 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="90ff3d24-4688-4c94-b5d6-c408e74c28ae" containerName="kube-state-metrics" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138716 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" containerName="cinder-api" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138722 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" containerName="cinder-api" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138731 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" containerName="cinder-api-log" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138737 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" containerName="cinder-api-log" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138749 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="890e99fd-959d-4946-9716-acfe78278964" containerName="nova-cell1-novncproxy-novncproxy" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138754 5003 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="890e99fd-959d-4946-9716-acfe78278964" containerName="nova-cell1-novncproxy-novncproxy" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.138767 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="274fbbf3-b927-408e-9594-946f6ea71638" containerName="memcached" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138773 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="274fbbf3-b927-408e-9594-946f6ea71638" containerName="memcached" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.138922 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139036 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerName="placement-log" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139053 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c1199f-e162-4062-a972-417afa58eaa6" containerName="glance-httpd" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139063 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="afad0966-8385-444b-9eed-8418c0a49b2a" containerName="proxy-server" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139070 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="90ff3d24-4688-4c94-b5d6-c408e74c28ae" containerName="kube-state-metrics" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139078 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ebba05f-e935-404f-85c0-4bd28a6afd28" containerName="placement-api" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139086 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="afad0966-8385-444b-9eed-8418c0a49b2a" containerName="proxy-httpd" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139094 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="890e99fd-959d-4946-9716-acfe78278964" containerName="nova-cell1-novncproxy-novncproxy" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139106 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" containerName="galera" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139113 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="28122a6a-8b54-4ff3-9092-a1f7439a35cf" containerName="nova-cell0-conductor-conductor" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139124 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c1199f-e162-4062-a972-417afa58eaa6" containerName="glance-log" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139132 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" containerName="cinder-api-log" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139143 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="274fbbf3-b927-408e-9594-946f6ea71638" containerName="memcached" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139153 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" containerName="cinder-api" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.139333 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="714823a9-560a-496c-b975-2db1099ad873" containerName="glance-httpd" Jan 04 12:13:26 crc 
kubenswrapper[5003]: I0104 12:13:26.139342 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="714823a9-560a-496c-b975-2db1099ad873" containerName="glance-httpd" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.139353 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="714823a9-560a-496c-b975-2db1099ad873" containerName="glance-log" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139359 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="714823a9-560a-496c-b975-2db1099ad873" containerName="glance-log" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139540 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="714823a9-560a-496c-b975-2db1099ad873" containerName="glance-httpd" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.139560 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="714823a9-560a-496c-b975-2db1099ad873" containerName="glance-log" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.141200 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.144006 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fjrtq"] Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.157160 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-config-data" (OuterVolumeSpecName: "config-data") pod "28122a6a-8b54-4ff3-9092-a1f7439a35cf" (UID: "28122a6a-8b54-4ff3-9092-a1f7439a35cf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.158040 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.227340 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-logs\") pod \"714823a9-560a-496c-b975-2db1099ad873\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230185 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-public-tls-certs\") pod \"714823a9-560a-496c-b975-2db1099ad873\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230241 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-httpd-run\") pod \"714823a9-560a-496c-b975-2db1099ad873\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230289 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlmrv\" (UniqueName: \"kubernetes.io/projected/714823a9-560a-496c-b975-2db1099ad873-kube-api-access-hlmrv\") pod \"714823a9-560a-496c-b975-2db1099ad873\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230336 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-internal-tls-certs\") pod \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230364 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-combined-ca-bundle\") pod \"714823a9-560a-496c-b975-2db1099ad873\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230394 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-config-data\") pod \"714823a9-560a-496c-b975-2db1099ad873\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230383 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data" (OuterVolumeSpecName: "config-data") pod "2c899132-ee49-4ab3-89ea-95f0bfcb71ab" (UID: "2c899132-ee49-4ab3-89ea-95f0bfcb71ab"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230735 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-scripts\") pod \"714823a9-560a-496c-b975-2db1099ad873\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230849 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data-custom\") pod \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230879 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"714823a9-560a-496c-b975-2db1099ad873\" (UID: \"714823a9-560a-496c-b975-2db1099ad873\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230916 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data\") pod \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\" (UID: \"2c899132-ee49-4ab3-89ea-95f0bfcb71ab\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230946 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-combined-ca-bundle\") pod \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.230975 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-logs\") pod \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.231086 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-public-tls-certs\") pod \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.231172 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mb4ms\" (UniqueName: \"kubernetes.io/projected/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-kube-api-access-mb4ms\") pod \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.231234 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data\") pod \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\" (UID: \"b53805c6-4e15-4580-a60d-1f0c9c1fcef6\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.231667 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpmd4\" (UniqueName: \"kubernetes.io/projected/99f5999d-5397-4e91-b56d-d0d543afc2a7-kube-api-access-wpmd4\") pod \"community-operators-fjrtq\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " 
pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.231774 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-utilities\") pod \"community-operators-fjrtq\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.231958 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-catalog-content\") pod \"community-operators-fjrtq\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.232436 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28122a6a-8b54-4ff3-9092-a1f7439a35cf-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.232457 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.234097 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-logs" (OuterVolumeSpecName: "logs") pod "b53805c6-4e15-4580-a60d-1f0c9c1fcef6" (UID: "b53805c6-4e15-4580-a60d-1f0c9c1fcef6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.235202 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "714823a9-560a-496c-b975-2db1099ad873" (UID: "714823a9-560a-496c-b975-2db1099ad873"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.235437 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-logs" (OuterVolumeSpecName: "logs") pod "714823a9-560a-496c-b975-2db1099ad873" (UID: "714823a9-560a-496c-b975-2db1099ad873"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: W0104 12:13:26.235534 5003 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/2c899132-ee49-4ab3-89ea-95f0bfcb71ab/volumes/kubernetes.io~secret/config-data Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.235546 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data" (OuterVolumeSpecName: "config-data") pod "2c899132-ee49-4ab3-89ea-95f0bfcb71ab" (UID: "2c899132-ee49-4ab3-89ea-95f0bfcb71ab"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.247583 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "714823a9-560a-496c-b975-2db1099ad873" (UID: "714823a9-560a-496c-b975-2db1099ad873"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.247633 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/714823a9-560a-496c-b975-2db1099ad873-kube-api-access-hlmrv" (OuterVolumeSpecName: "kube-api-access-hlmrv") pod "714823a9-560a-496c-b975-2db1099ad873" (UID: "714823a9-560a-496c-b975-2db1099ad873"). InnerVolumeSpecName "kube-api-access-hlmrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.255263 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "43c1199f-e162-4062-a972-417afa58eaa6" (UID: "43c1199f-e162-4062-a972-417afa58eaa6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.261315 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0ebba05f-e935-404f-85c0-4bd28a6afd28" (UID: "0ebba05f-e935-404f-85c0-4bd28a6afd28"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.262397 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b53805c6-4e15-4580-a60d-1f0c9c1fcef6" (UID: "b53805c6-4e15-4580-a60d-1f0c9c1fcef6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.267069 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2c899132-ee49-4ab3-89ea-95f0bfcb71ab" (UID: "2c899132-ee49-4ab3-89ea-95f0bfcb71ab"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.268375 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-kube-api-access-mb4ms" (OuterVolumeSpecName: "kube-api-access-mb4ms") pod "b53805c6-4e15-4580-a60d-1f0c9c1fcef6" (UID: "b53805c6-4e15-4580-a60d-1f0c9c1fcef6"). InnerVolumeSpecName "kube-api-access-mb4ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.291345 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-scripts" (OuterVolumeSpecName: "scripts") pod "714823a9-560a-496c-b975-2db1099ad873" (UID: "714823a9-560a-496c-b975-2db1099ad873"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.306247 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "714823a9-560a-496c-b975-2db1099ad873" (UID: "714823a9-560a-496c-b975-2db1099ad873"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.335589 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-catalog-content\") pod \"community-operators-fjrtq\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.335708 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpmd4\" (UniqueName: \"kubernetes.io/projected/99f5999d-5397-4e91-b56d-d0d543afc2a7-kube-api-access-wpmd4\") pod \"community-operators-fjrtq\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.335758 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-utilities\") pod \"community-operators-fjrtq\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336162 5003 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336174 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336183 5003 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/714823a9-560a-496c-b975-2db1099ad873-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336194 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlmrv\" (UniqueName: \"kubernetes.io/projected/714823a9-560a-496c-b975-2db1099ad873-kube-api-access-hlmrv\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336204 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336214 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336224 5003 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ebba05f-e935-404f-85c0-4bd28a6afd28-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 
04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336237 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336257 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336267 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336276 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336287 5003 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/43c1199f-e162-4062-a972-417afa58eaa6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.336295 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mb4ms\" (UniqueName: \"kubernetes.io/projected/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-kube-api-access-mb4ms\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.337034 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-catalog-content\") pod \"community-operators-fjrtq\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.337575 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-utilities\") pod \"community-operators-fjrtq\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.340573 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b53805c6-4e15-4580-a60d-1f0c9c1fcef6" (UID: "b53805c6-4e15-4580-a60d-1f0c9c1fcef6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.354079 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpmd4\" (UniqueName: \"kubernetes.io/projected/99f5999d-5397-4e91-b56d-d0d543afc2a7-kube-api-access-wpmd4\") pod \"community-operators-fjrtq\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.359438 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2c899132-ee49-4ab3-89ea-95f0bfcb71ab" (UID: "2c899132-ee49-4ab3-89ea-95f0bfcb71ab"). 
InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.359568 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b53805c6-4e15-4580-a60d-1f0c9c1fcef6" (UID: "b53805c6-4e15-4580-a60d-1f0c9c1fcef6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.361690 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.405209 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-config-data" (OuterVolumeSpecName: "config-data") pod "714823a9-560a-496c-b975-2db1099ad873" (UID: "714823a9-560a-496c-b975-2db1099ad873"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.415533 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b53805c6-4e15-4580-a60d-1f0c9c1fcef6" (UID: "b53805c6-4e15-4580-a60d-1f0c9c1fcef6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.422860 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "714823a9-560a-496c-b975-2db1099ad873" (UID: "714823a9-560a-496c-b975-2db1099ad873"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.440287 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.440336 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.440351 5003 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.440367 5003 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.440381 5003 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c899132-ee49-4ab3-89ea-95f0bfcb71ab-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.440392 5003 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.440401 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/714823a9-560a-496c-b975-2db1099ad873-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.478140 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data" (OuterVolumeSpecName: "config-data") pod "b53805c6-4e15-4580-a60d-1f0c9c1fcef6" (UID: "b53805c6-4e15-4580-a60d-1f0c9c1fcef6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.500708 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"714823a9-560a-496c-b975-2db1099ad873","Type":"ContainerDied","Data":"5079538a30b9ac5c3493bac521155c91f7dc65296908f66d20dc4e686b909028"} Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.500825 5003 scope.go:117] "RemoveContainer" containerID="5b23433759eb714853942994a37054e568cfd11f999d8e6d3f6f86c33c3787c4" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.501051 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.541513 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66fdccd748-zk2qt" event={"ID":"b53805c6-4e15-4580-a60d-1f0c9c1fcef6","Type":"ContainerDied","Data":"7ad830d8390930ebcfe985abb2a6cc13a55e16f03367ecdb17cdf43794420ce8"} Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.541680 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-66fdccd748-zk2qt" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.542110 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b53805c6-4e15-4580-a60d-1f0c9c1fcef6-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.557061 5003 generic.go:334] "Generic (PLEG): container finished" podID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerID="e66e7dbec82eeff67f71401a2ea11fe88a1996da2709ae8632f320656877be7d" exitCode=0 Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.557186 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42","Type":"ContainerDied","Data":"e66e7dbec82eeff67f71401a2ea11fe88a1996da2709ae8632f320656877be7d"} Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.577360 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc","Type":"ContainerDied","Data":"3d3e15b330c64221f5822e535c747346dcf75ad8ef1ec7f171c687c7c7388cf9"} Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.577412 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d3e15b330c64221f5822e535c747346dcf75ad8ef1ec7f171c687c7c7388cf9" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.596776 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-kn44b" event={"ID":"9f2d1a12-b4ce-48c2-88b0-5ae881760963","Type":"ContainerDied","Data":"27f22212fb6f4e2a634420812b10a71db275049351456b55108ab2c11eb76cc5"} Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.596818 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27f22212fb6f4e2a634420812b10a71db275049351456b55108ab2c11eb76cc5" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.615337 5003 scope.go:117] "RemoveContainer" containerID="02447dca870462f4df40064d89c32f3a3d466ae10125a4bb87a31ddc1ecd1f9e" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.615802 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"43c1199f-e162-4062-a972-417afa58eaa6","Type":"ContainerDied","Data":"ab44f32cf572bf8c1e8ffc367ae849e6c8acdc85910460ec964a7aad847487a0"} Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.615986 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.616994 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.649056 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.652812 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"28122a6a-8b54-4ff3-9092-a1f7439a35cf","Type":"ContainerDied","Data":"9e8de1c7eb573065126ac170b63253962cc73824f45c5ce5383a20fee3a33e6f"} Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.666107 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-66fdccd748-zk2qt"] Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.680885 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.707876 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.708917 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"2c899132-ee49-4ab3-89ea-95f0bfcb71ab","Type":"ContainerDied","Data":"e7f8d7fa6a185457322a4d262490f9d28e3431b5d35bf629a605517ded7a3222"} Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.750586 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-internal-tls-certs\") pod \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.750987 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-logs\") pod \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.751033 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-config-data\") pod \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.751084 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-config-data\") pod \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.751126 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-nova-metadata-tls-certs\") pod \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.751150 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-logs\") pod \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.751184 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-combined-ca-bundle\") pod 
\"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.751350 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-public-tls-certs\") pod \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.751414 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgzj8\" (UniqueName: \"kubernetes.io/projected/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-kube-api-access-jgzj8\") pod \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.751449 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-combined-ca-bundle\") pod \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\" (UID: \"0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.751501 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8r2wk\" (UniqueName: \"kubernetes.io/projected/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-kube-api-access-8r2wk\") pod \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\" (UID: \"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.753493 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.760954 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-66fdccd748-zk2qt"] Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.763094 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-logs" (OuterVolumeSpecName: "logs") pod "b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" (UID: "b4c84d72-3209-4925-9eb2-cbebcd1e8ae7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.764324 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-logs" (OuterVolumeSpecName: "logs") pod "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" (UID: "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.765443 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-kube-api-access-8r2wk" (OuterVolumeSpecName: "kube-api-access-8r2wk") pod "b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" (UID: "b4c84d72-3209-4925-9eb2-cbebcd1e8ae7"). InnerVolumeSpecName "kube-api-access-8r2wk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.788408 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.804120 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.826405 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4c84d72-3209-4925-9eb2-cbebcd1e8ae7","Type":"ContainerDied","Data":"339fc6a2c3d87ae14805161c26cba53699a18a3387d7641a5b00e92740038d5e"} Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.844728 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-kn44b" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.843453 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-kube-api-access-jgzj8" (OuterVolumeSpecName: "kube-api-access-jgzj8") pod "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" (UID: "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc"). InnerVolumeSpecName "kube-api-access-jgzj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.826595 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.853975 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-7d88cc4d8f-5mhr4" podUID="23de8292-dc91-45db-8de9-59933352e3f2" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.157:9696/\": dial tcp 10.217.0.157:9696: connect: connection refused" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.854903 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-config-data" (OuterVolumeSpecName: "config-data") pod "b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" (UID: "b4c84d72-3209-4925-9eb2-cbebcd1e8ae7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.865043 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-788e-account-create-update-p27qs" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.865217 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.865301 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-5569cb5574-jt6r6" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.866492 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.866610 5003 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 04 12:13:26 crc kubenswrapper[5003]: E0104 12:13:26.866706 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data podName:829003dc-aa5e-43a6-a4f5-c578c73e76d4 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:34.866675624 +0000 UTC m=+1530.339705465 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data") pod "rabbitmq-cell1-server-0" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4") : configmap "rabbitmq-cell1-config-data" not found Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.892136 5003 scope.go:117] "RemoveContainer" containerID="dd895a8362663704ae95aaa9cadf69c10b404ecae29d602906e53e472c16265f" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.893071 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgzj8\" (UniqueName: \"kubernetes.io/projected/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-kube-api-access-jgzj8\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.893504 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8r2wk\" (UniqueName: \"kubernetes.io/projected/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-kube-api-access-8r2wk\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.893549 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.902719 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" (UID: "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.927753 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28114f85-1d9b-4e71-be5f-d721f06c70dc" path="/var/lib/kubelet/pods/28114f85-1d9b-4e71-be5f-d721f06c70dc/volumes" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.929648 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7" path="/var/lib/kubelet/pods/2f54e309-ce75-4ab4-8fad-f7c1ad1c9cb7/volumes" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.930281 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="714823a9-560a-496c-b975-2db1099ad873" path="/var/lib/kubelet/pods/714823a9-560a-496c-b975-2db1099ad873/volumes" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.931045 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.942364 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="890e99fd-959d-4946-9716-acfe78278964" path="/var/lib/kubelet/pods/890e99fd-959d-4946-9716-acfe78278964/volumes" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.943419 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90ff3d24-4688-4c94-b5d6-c408e74c28ae" path="/var/lib/kubelet/pods/90ff3d24-4688-4c94-b5d6-c408e74c28ae/volumes" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.950360 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afad0966-8385-444b-9eed-8418c0a49b2a" path="/var/lib/kubelet/pods/afad0966-8385-444b-9eed-8418c0a49b2a/volumes" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.951471 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" path="/var/lib/kubelet/pods/b53805c6-4e15-4580-a60d-1f0c9c1fcef6/volumes" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.952274 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c304f0e5-d232-4549-ab8f-f11f5008f903" path="/var/lib/kubelet/pods/c304f0e5-d232-4549-ab8f-f11f5008f903/volumes" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.961483 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9f73829-d0a1-4e4d-8f5a-755d63ce1caa" path="/var/lib/kubelet/pods/e9f73829-d0a1-4e4d-8f5a-755d63ce1caa/volumes" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.969160 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fba35580-43f2-4d96-8c52-6da2b5fdbd94" path="/var/lib/kubelet/pods/fba35580-43f2-4d96-8c52-6da2b5fdbd94/volumes" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.975779 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" (UID: "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.976410 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-config-data" (OuterVolumeSpecName: "config-data") pod "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" (UID: "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.995363 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22bgs\" (UniqueName: \"kubernetes.io/projected/9f2d1a12-b4ce-48c2-88b0-5ae881760963-kube-api-access-22bgs\") pod \"9f2d1a12-b4ce-48c2-88b0-5ae881760963\" (UID: \"9f2d1a12-b4ce-48c2-88b0-5ae881760963\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.995815 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts\") pod \"9f2d1a12-b4ce-48c2-88b0-5ae881760963\" (UID: \"9f2d1a12-b4ce-48c2-88b0-5ae881760963\") " Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.996922 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9f2d1a12-b4ce-48c2-88b0-5ae881760963" (UID: "9f2d1a12-b4ce-48c2-88b0-5ae881760963"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.997464 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.997482 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f2d1a12-b4ce-48c2-88b0-5ae881760963-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.997492 5003 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.997505 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:26 crc kubenswrapper[5003]: I0104 12:13:26.997517 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.026413 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f2d1a12-b4ce-48c2-88b0-5ae881760963-kube-api-access-22bgs" (OuterVolumeSpecName: "kube-api-access-22bgs") pod "9f2d1a12-b4ce-48c2-88b0-5ae881760963" (UID: "9f2d1a12-b4ce-48c2-88b0-5ae881760963"). InnerVolumeSpecName "kube-api-access-22bgs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.031230 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" (UID: "b4c84d72-3209-4925-9eb2-cbebcd1e8ae7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.036885 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" (UID: "b4c84d72-3209-4925-9eb2-cbebcd1e8ae7"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.066430 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" (UID: "0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.099981 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-scripts\") pod \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.100061 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-combined-ca-bundle\") pod \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.101175 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-config-data\") pod \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.101213 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2tnh\" (UniqueName: \"kubernetes.io/projected/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-kube-api-access-n2tnh\") pod \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.101233 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-log-httpd\") pod \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.101289 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-ceilometer-tls-certs\") pod \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.101307 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-sg-core-conf-yaml\") pod \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.101337 5003 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-run-httpd\") pod \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\" (UID: \"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.101933 5003 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.101952 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.101962 5003 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.101974 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22bgs\" (UniqueName: \"kubernetes.io/projected/9f2d1a12-b4ce-48c2-88b0-5ae881760963-kube-api-access-22bgs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.103780 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" (UID: "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.118746 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" (UID: "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.126722 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-scripts" (OuterVolumeSpecName: "scripts") pod "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" (UID: "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.142829 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.142865 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.142881 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.142893 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.142903 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.142913 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.177952 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-kube-api-access-n2tnh" (OuterVolumeSpecName: "kube-api-access-n2tnh") pod "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" (UID: "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42"). InnerVolumeSpecName "kube-api-access-n2tnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.179141 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" (UID: "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.185095 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-788e-account-create-update-p27qs"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.189670 5003 scope.go:117] "RemoveContainer" containerID="8100c888907e0c359672d9aa57c58750ef202eac34f6410af86c42eefa66cc49" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.199281 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-788e-account-create-update-p27qs"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.206189 5003 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.206498 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.206662 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2tnh\" (UniqueName: \"kubernetes.io/projected/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-kube-api-access-n2tnh\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.206738 5003 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.206812 5003 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.207264 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" (UID: "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.239976 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.245240 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" (UID: "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.275688 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.290661 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-config-data" (OuterVolumeSpecName: "config-data") pod "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" (UID: "5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.311189 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.311216 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.311225 5003 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.311235 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.311244 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tptlx\" (UniqueName: \"kubernetes.io/projected/5fb2bb50-645d-4cbc-973a-0f5c1847ea46-kube-api-access-tptlx\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: E0104 12:13:27.487067 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:27 crc kubenswrapper[5003]: E0104 12:13:27.487570 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:27 crc kubenswrapper[5003]: E0104 12:13:27.488133 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:27 crc kubenswrapper[5003]: E0104 12:13:27.488208 5003 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server" Jan 04 12:13:27 crc kubenswrapper[5003]: E0104 12:13:27.493461 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:27 crc kubenswrapper[5003]: E0104 12:13:27.495840 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:27 crc kubenswrapper[5003]: E0104 12:13:27.497125 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:27 crc kubenswrapper[5003]: E0104 12:13:27.497224 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovs-vswitchd" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.525200 5003 scope.go:117] "RemoveContainer" containerID="193d03f2dc8b6a55e957d530e54441551f102796455d35a76dcd721cbba41982" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.532286 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.547344 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.568165 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.570413 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.579981 5003 scope.go:117] "RemoveContainer" containerID="2faff7fc607a82782ff7622c7373fabbd6ab09171776a5361884986f16ec6df5" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.590142 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-5569cb5574-jt6r6"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.602054 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-5569cb5574-jt6r6"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.619730 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-generated\") pod \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.619791 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h77s5\" (UniqueName: \"kubernetes.io/projected/7d5d490f-d968-4237-8a63-7f7d01b8708d-kube-api-access-h77s5\") pod \"7d5d490f-d968-4237-8a63-7f7d01b8708d\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.619840 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rkz6\" (UniqueName: \"kubernetes.io/projected/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kube-api-access-4rkz6\") pod \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.619903 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-default\") pod \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.619957 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data-custom\") pod \"7d5d490f-d968-4237-8a63-7f7d01b8708d\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.620006 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kolla-config\") pod \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.620093 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.620164 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-scripts\") pod \"7d5d490f-d968-4237-8a63-7f7d01b8708d\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.620221 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-combined-ca-bundle\") pod \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.620255 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-operator-scripts\") pod \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.620323 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7d5d490f-d968-4237-8a63-7f7d01b8708d-etc-machine-id\") pod \"7d5d490f-d968-4237-8a63-7f7d01b8708d\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.620368 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-galera-tls-certs\") pod \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\" (UID: \"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.620448 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data\") pod \"7d5d490f-d968-4237-8a63-7f7d01b8708d\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.620480 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-combined-ca-bundle\") pod \"7d5d490f-d968-4237-8a63-7f7d01b8708d\" (UID: \"7d5d490f-d968-4237-8a63-7f7d01b8708d\") " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.622890 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" (UID: "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.623793 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" (UID: "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.629945 5003 scope.go:117] "RemoveContainer" containerID="716e1877196d93764346bb1a22d3036cf3d4fe6efcbf6126e0f1878e472f9fc4" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.634092 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" (UID: "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.636936 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d5d490f-d968-4237-8a63-7f7d01b8708d-kube-api-access-h77s5" (OuterVolumeSpecName: "kube-api-access-h77s5") pod "7d5d490f-d968-4237-8a63-7f7d01b8708d" (UID: "7d5d490f-d968-4237-8a63-7f7d01b8708d"). InnerVolumeSpecName "kube-api-access-h77s5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.637207 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7d5d490f-d968-4237-8a63-7f7d01b8708d" (UID: "7d5d490f-d968-4237-8a63-7f7d01b8708d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.637361 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7d5d490f-d968-4237-8a63-7f7d01b8708d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7d5d490f-d968-4237-8a63-7f7d01b8708d" (UID: "7d5d490f-d968-4237-8a63-7f7d01b8708d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.637832 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" (UID: "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.638154 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kube-api-access-4rkz6" (OuterVolumeSpecName: "kube-api-access-4rkz6") pod "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" (UID: "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9"). InnerVolumeSpecName "kube-api-access-4rkz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.647678 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" (UID: "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.652359 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fjrtq"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.654800 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-scripts" (OuterVolumeSpecName: "scripts") pod "7d5d490f-d968-4237-8a63-7f7d01b8708d" (UID: "7d5d490f-d968-4237-8a63-7f7d01b8708d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.690805 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d5d490f-d968-4237-8a63-7f7d01b8708d" (UID: "7d5d490f-d968-4237-8a63-7f7d01b8708d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.691553 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" (UID: "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.692487 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" (UID: "30c47e0c-622e-4f66-a71d-f7e6cc0f23d9"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.722712 5003 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7d5d490f-d968-4237-8a63-7f7d01b8708d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723136 5003 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723147 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723157 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723169 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h77s5\" (UniqueName: \"kubernetes.io/projected/7d5d490f-d968-4237-8a63-7f7d01b8708d-kube-api-access-h77s5\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723178 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rkz6\" (UniqueName: \"kubernetes.io/projected/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kube-api-access-4rkz6\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723187 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723196 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723207 5003 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723229 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723238 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723254 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.723263 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.725991 5003 scope.go:117] "RemoveContainer" containerID="46919056bf4b2db61c4b851726637476ed5f878bddbce873c5a30520c666773e" Jan 04 12:13:27 crc kubenswrapper[5003]: W0104 12:13:27.726860 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99f5999d_5397_4e91_b56d_d0d543afc2a7.slice/crio-966807ae8519bff3763321c5298e896817952acc60ff7db8630e2f508424f490 WatchSource:0}: Error finding container 966807ae8519bff3763321c5298e896817952acc60ff7db8630e2f508424f490: Status 404 returned error can't find the container with id 966807ae8519bff3763321c5298e896817952acc60ff7db8630e2f508424f490 Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.744230 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.759790 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data" (OuterVolumeSpecName: "config-data") pod "7d5d490f-d968-4237-8a63-7f7d01b8708d" (UID: "7d5d490f-d968-4237-8a63-7f7d01b8708d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.821788 5003 scope.go:117] "RemoveContainer" containerID="8dc168dd5d62f051b24906056242c3db9d4e7a18cca4bd69532834764b2e4b47" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.824300 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d5d490f-d968-4237-8a63-7f7d01b8708d-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.824327 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.866458 5003 scope.go:117] "RemoveContainer" containerID="88cf151cf804256f12f3d9496a2faf5cc39d58a9955cca558c56b93f8e9f6281" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.887107 5003 generic.go:334] "Generic (PLEG): container finished" podID="30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" containerID="c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368" exitCode=0 Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.887181 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9","Type":"ContainerDied","Data":"c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368"} Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.887193 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.887213 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"30c47e0c-622e-4f66-a71d-f7e6cc0f23d9","Type":"ContainerDied","Data":"820ed0f4b8611cfdb78035e27df4f2a9fff7e8716ead65a412cd9e625a125567"} Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.899964 5003 generic.go:334] "Generic (PLEG): container finished" podID="597b6841-5a72-4d8d-b2a6-dec279d628d0" containerID="f6abe0f83c7bb707281a5925c06f33a9019ba1df8b444b347dc26031613596ff" exitCode=0 Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.900077 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-85dcb76789-v5z7d" event={"ID":"597b6841-5a72-4d8d-b2a6-dec279d628d0","Type":"ContainerDied","Data":"f6abe0f83c7bb707281a5925c06f33a9019ba1df8b444b347dc26031613596ff"} Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.909225 5003 generic.go:334] "Generic (PLEG): container finished" podID="829003dc-aa5e-43a6-a4f5-c578c73e76d4" containerID="a7db53d84b6d5b63248f6eb1e83906ab06a6912bc5b207be4b9a8cd84f1c3d9f" exitCode=0 Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.909335 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"829003dc-aa5e-43a6-a4f5-c578c73e76d4","Type":"ContainerDied","Data":"a7db53d84b6d5b63248f6eb1e83906ab06a6912bc5b207be4b9a8cd84f1c3d9f"} Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.915360 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42","Type":"ContainerDied","Data":"70aae434263e1dd5f3014d291c91e75d5bad2e7f752df83cda006c169d11dc8e"} Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.915526 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.930803 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_653b2e11-95ca-46e7-b28c-a1170d7a180b/ovn-northd/0.log" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.930848 5003 generic.go:334] "Generic (PLEG): container finished" podID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerID="3e03c9994a07fa7e18a42bd5c17e591979717aa51ecb5df6f1ce553e5a854fff" exitCode=139 Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.930902 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"653b2e11-95ca-46e7-b28c-a1170d7a180b","Type":"ContainerDied","Data":"3e03c9994a07fa7e18a42bd5c17e591979717aa51ecb5df6f1ce553e5a854fff"} Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.931249 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-85dcb76789-v5z7d" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.947217 5003 generic.go:334] "Generic (PLEG): container finished" podID="7d5d490f-d968-4237-8a63-7f7d01b8708d" containerID="5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695" exitCode=0 Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.947296 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7d5d490f-d968-4237-8a63-7f7d01b8708d","Type":"ContainerDied","Data":"5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695"} Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.947330 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"7d5d490f-d968-4237-8a63-7f7d01b8708d","Type":"ContainerDied","Data":"58151d94a3364d675297e071fbac00a96c7a23ef674378752a5510281eb8f1d1"} Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.947400 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.950813 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.951646 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.952319 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjrtq" event={"ID":"99f5999d-5397-4e91-b56d-d0d543afc2a7","Type":"ContainerStarted","Data":"966807ae8519bff3763321c5298e896817952acc60ff7db8630e2f508424f490"} Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.952508 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-kn44b" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.958943 5003 scope.go:117] "RemoveContainer" containerID="a03dc47ea5678d35499c7d2c162e59903757ffa22b0c3abb3b985027ba48121d" Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.970348 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Jan 04 12:13:27 crc kubenswrapper[5003]: I0104 12:13:27.999613 5003 scope.go:117] "RemoveContainer" containerID="c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.011565 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.023201 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.028823 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.030355 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-credential-keys\") pod \"597b6841-5a72-4d8d-b2a6-dec279d628d0\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.030404 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-scripts\") pod \"597b6841-5a72-4d8d-b2a6-dec279d628d0\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.030485 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-public-tls-certs\") pod \"597b6841-5a72-4d8d-b2a6-dec279d628d0\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.030605 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgqdq\" (UniqueName: \"kubernetes.io/projected/597b6841-5a72-4d8d-b2a6-dec279d628d0-kube-api-access-rgqdq\") pod \"597b6841-5a72-4d8d-b2a6-dec279d628d0\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.030634 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-fernet-keys\") pod \"597b6841-5a72-4d8d-b2a6-dec279d628d0\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.030658 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-internal-tls-certs\") pod \"597b6841-5a72-4d8d-b2a6-dec279d628d0\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.030716 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-combined-ca-bundle\") pod \"597b6841-5a72-4d8d-b2a6-dec279d628d0\" (UID: \"597b6841-5a72-4d8d-b2a6-dec279d628d0\") " Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 
Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.031634 5003 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.031692 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data podName:81193935-fcd0-4877-9d65-6155c1a888e2 nodeName:}" failed. No retries permitted until 2026-01-04 12:13:36.031674422 +0000 UTC m=+1531.504704253 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data") pod "rabbitmq-server-0" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2") : configmap "rabbitmq-config-data" not found
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.037440 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/597b6841-5a72-4d8d-b2a6-dec279d628d0-kube-api-access-rgqdq" (OuterVolumeSpecName: "kube-api-access-rgqdq") pod "597b6841-5a72-4d8d-b2a6-dec279d628d0" (UID: "597b6841-5a72-4d8d-b2a6-dec279d628d0"). InnerVolumeSpecName "kube-api-access-rgqdq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.037736 5003 scope.go:117] "RemoveContainer" containerID="7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.043952 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.050507 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "597b6841-5a72-4d8d-b2a6-dec279d628d0" (UID: "597b6841-5a72-4d8d-b2a6-dec279d628d0"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.051808 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-scripts" (OuterVolumeSpecName: "scripts") pod "597b6841-5a72-4d8d-b2a6-dec279d628d0" (UID: "597b6841-5a72-4d8d-b2a6-dec279d628d0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.051864 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "597b6841-5a72-4d8d-b2a6-dec279d628d0" (UID: "597b6841-5a72-4d8d-b2a6-dec279d628d0"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.071005 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-kn44b"]
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.080034 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-kn44b"]
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.083647 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "597b6841-5a72-4d8d-b2a6-dec279d628d0" (UID: "597b6841-5a72-4d8d-b2a6-dec279d628d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.091930 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-config-data" (OuterVolumeSpecName: "config-data") pod "597b6841-5a72-4d8d-b2a6-dec279d628d0" (UID: "597b6841-5a72-4d8d-b2a6-dec279d628d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.104049 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.112745 5003 scope.go:117] "RemoveContainer" containerID="c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368"
Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.115403 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368\": container with ID starting with c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368 not found: ID does not exist" containerID="c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.115470 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368"} err="failed to get container status \"c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368\": rpc error: code = NotFound desc = could not find container \"c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368\": container with ID starting with c51f66616df30830ce3cae7d185eefe24cf5be0fe4db029c3e5c213dfd1ab368 not found: ID does not exist"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.115510 5003 scope.go:117] "RemoveContainer" containerID="7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638"
Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.115964 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638\": container with ID starting with 7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638 not found: ID does not exist" containerID="7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.115989 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638"} err="failed to get container status \"7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638\": rpc error: code = NotFound desc = could not find container \"7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638\": container with ID starting with 7cfabb243db2df216602d5fb89bb4ba8ccc3ab533869f71cb92afb81824f5638 not found: ID does not exist"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.116008 5003 scope.go:117] "RemoveContainer" containerID="07b2e1874e1e6350f660adafaf47ead2fb632e268672667911eded298b5a3565"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.120725 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.132882 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_653b2e11-95ca-46e7-b28c-a1170d7a180b/ovn-northd/0.log"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.132970 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.135686 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.135719 5003 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-credential-keys\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.135735 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.135746 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgqdq\" (UniqueName: \"kubernetes.io/projected/597b6841-5a72-4d8d-b2a6-dec279d628d0-kube-api-access-rgqdq\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.135756 5003 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-fernet-keys\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.135770 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.164442 5003 scope.go:117] "RemoveContainer" containerID="c595cd857ef9b98c086bda781fccff343323ff20c25f68b8460e16c62cecb388"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.174025 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "597b6841-5a72-4d8d-b2a6-dec279d628d0" (UID: "597b6841-5a72-4d8d-b2a6-dec279d628d0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.177151 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "597b6841-5a72-4d8d-b2a6-dec279d628d0" (UID: "597b6841-5a72-4d8d-b2a6-dec279d628d0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.237078 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-config\") pod \"653b2e11-95ca-46e7-b28c-a1170d7a180b\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.237178 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-scripts\") pod \"653b2e11-95ca-46e7-b28c-a1170d7a180b\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.237239 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-combined-ca-bundle\") pod \"653b2e11-95ca-46e7-b28c-a1170d7a180b\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.237314 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8j8k\" (UniqueName: \"kubernetes.io/projected/653b2e11-95ca-46e7-b28c-a1170d7a180b-kube-api-access-j8j8k\") pod \"653b2e11-95ca-46e7-b28c-a1170d7a180b\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.237352 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-rundir\") pod \"653b2e11-95ca-46e7-b28c-a1170d7a180b\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.237399 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-metrics-certs-tls-certs\") pod \"653b2e11-95ca-46e7-b28c-a1170d7a180b\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.237503 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-northd-tls-certs\") pod \"653b2e11-95ca-46e7-b28c-a1170d7a180b\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.238029 5003 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-public-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.238047 5003 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597b6841-5a72-4d8d-b2a6-dec279d628d0-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.238787 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-config" (OuterVolumeSpecName: "config") pod "653b2e11-95ca-46e7-b28c-a1170d7a180b" (UID: "653b2e11-95ca-46e7-b28c-a1170d7a180b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.240272 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "653b2e11-95ca-46e7-b28c-a1170d7a180b" (UID: "653b2e11-95ca-46e7-b28c-a1170d7a180b"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.241155 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-scripts" (OuterVolumeSpecName: "scripts") pod "653b2e11-95ca-46e7-b28c-a1170d7a180b" (UID: "653b2e11-95ca-46e7-b28c-a1170d7a180b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.248661 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/653b2e11-95ca-46e7-b28c-a1170d7a180b-kube-api-access-j8j8k" (OuterVolumeSpecName: "kube-api-access-j8j8k") pod "653b2e11-95ca-46e7-b28c-a1170d7a180b" (UID: "653b2e11-95ca-46e7-b28c-a1170d7a180b"). InnerVolumeSpecName "kube-api-access-j8j8k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.273329 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "653b2e11-95ca-46e7-b28c-a1170d7a180b" (UID: "653b2e11-95ca-46e7-b28c-a1170d7a180b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.305905 5003 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-metrics-certs-tls-certs podName:653b2e11-95ca-46e7-b28c-a1170d7a180b nodeName:}" failed. No retries permitted until 2026-01-04 12:13:28.805872938 +0000 UTC m=+1524.278902779 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-metrics-certs-tls-certs") pod "653b2e11-95ca-46e7-b28c-a1170d7a180b" (UID: "653b2e11-95ca-46e7-b28c-a1170d7a180b") : error deleting /var/lib/kubelet/pods/653b2e11-95ca-46e7-b28c-a1170d7a180b/volume-subpaths: remove /var/lib/kubelet/pods/653b2e11-95ca-46e7-b28c-a1170d7a180b/volume-subpaths: no such file or directory
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.308528 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "653b2e11-95ca-46e7-b28c-a1170d7a180b" (UID: "653b2e11-95ca-46e7-b28c-a1170d7a180b"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.339866 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.339901 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.339913 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8j8k\" (UniqueName: \"kubernetes.io/projected/653b2e11-95ca-46e7-b28c-a1170d7a180b-kube-api-access-j8j8k\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.339922 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-rundir\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.339933 5003 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.339942 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/653b2e11-95ca-46e7-b28c-a1170d7a180b-config\") on node \"crc\" DevicePath \"\""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.354859 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.361033 5003 scope.go:117] "RemoveContainer" containerID="e66e7dbec82eeff67f71401a2ea11fe88a1996da2709ae8632f320656877be7d"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.383338 5003 scope.go:117] "RemoveContainer" containerID="2d1f1d056b61194569686724f2003dba8a8e5a0d07b9d3c97b835d48ef2f3c01"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.430211 5003 scope.go:117] "RemoveContainer" containerID="08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c"
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.440826 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/829003dc-aa5e-43a6-a4f5-c578c73e76d4-pod-info\") pod \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.440886 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-confd\") pod \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.440930 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data\") pod \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.440995 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-erlang-cookie\") pod \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.441050 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/829003dc-aa5e-43a6-a4f5-c578c73e76d4-erlang-cookie-secret\") pod \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.441100 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-tls\") pod \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.441157 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-plugins-conf\") pod \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.441212 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-server-conf\") pod \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.441237 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-plugins\") pod \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.441284 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr8lh\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-kube-api-access-vr8lh\") pod \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.441320 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\" (UID: \"829003dc-aa5e-43a6-a4f5-c578c73e76d4\") "
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.442977 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "829003dc-aa5e-43a6-a4f5-c578c73e76d4" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.443636 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "829003dc-aa5e-43a6-a4f5-c578c73e76d4" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.444484 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "829003dc-aa5e-43a6-a4f5-c578c73e76d4" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.448541 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "829003dc-aa5e-43a6-a4f5-c578c73e76d4" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.449839 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/829003dc-aa5e-43a6-a4f5-c578c73e76d4-pod-info" (OuterVolumeSpecName: "pod-info") pod "829003dc-aa5e-43a6-a4f5-c578c73e76d4" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.451934 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "829003dc-aa5e-43a6-a4f5-c578c73e76d4" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.452299 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-kube-api-access-vr8lh" (OuterVolumeSpecName: "kube-api-access-vr8lh") pod "829003dc-aa5e-43a6-a4f5-c578c73e76d4" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4"). InnerVolumeSpecName "kube-api-access-vr8lh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.458005 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/829003dc-aa5e-43a6-a4f5-c578c73e76d4-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "829003dc-aa5e-43a6-a4f5-c578c73e76d4" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.459556 5003 scope.go:117] "RemoveContainer" containerID="5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.471434 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data" (OuterVolumeSpecName: "config-data") pod "829003dc-aa5e-43a6-a4f5-c578c73e76d4" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.495855 5003 scope.go:117] "RemoveContainer" containerID="08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.522244 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c\": container with ID starting with 08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c not found: ID does not exist" containerID="08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.524921 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c"} err="failed to get container status \"08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c\": rpc error: code = NotFound desc = could not find container \"08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c\": container with ID starting with 08cc0e5f5513aaea421a5ce329e2f5d7400d11b3025b3985ed67ee4874cda18c not found: ID does not exist" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.525052 5003 scope.go:117] "RemoveContainer" containerID="5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.524627 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-server-conf" (OuterVolumeSpecName: "server-conf") pod "829003dc-aa5e-43a6-a4f5-c578c73e76d4" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.532208 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695\": container with ID starting with 5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695 not found: ID does not exist" containerID="5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.532471 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695"} err="failed to get container status \"5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695\": rpc error: code = NotFound desc = could not find container \"5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695\": container with ID starting with 5e99af0ebfd30376d7774551d5e2627fb89feccfb91fc75b405039132573a695 not found: ID does not exist" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.544924 5003 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.544965 5003 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-server-conf\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.544979 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.544988 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr8lh\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-kube-api-access-vr8lh\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.545028 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.545039 5003 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/829003dc-aa5e-43a6-a4f5-c578c73e76d4-pod-info\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.545048 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/829003dc-aa5e-43a6-a4f5-c578c73e76d4-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.545057 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.545071 5003 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/829003dc-aa5e-43a6-a4f5-c578c73e76d4-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc 
kubenswrapper[5003]: I0104 12:13:28.545080 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.569323 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.574102 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "829003dc-aa5e-43a6-a4f5-c578c73e76d4" (UID: "829003dc-aa5e-43a6-a4f5-c578c73e76d4"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.647470 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/829003dc-aa5e-43a6-a4f5-c578c73e76d4-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.647532 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.777825 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-h4rlr"] Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.778850 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="597b6841-5a72-4d8d-b2a6-dec279d628d0" containerName="keystone-api" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.778876 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="597b6841-5a72-4d8d-b2a6-dec279d628d0" containerName="keystone-api" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.778894 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerName="barbican-api-log" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.778901 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerName="barbican-api-log" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.778910 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" containerName="galera" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.778917 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" containerName="galera" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.778928 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerName="openstack-network-exporter" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.778936 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerName="openstack-network-exporter" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.778954 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-metadata" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.778960 5003 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-metadata" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.778970 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="829003dc-aa5e-43a6-a4f5-c578c73e76d4" containerName="rabbitmq" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.778976 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="829003dc-aa5e-43a6-a4f5-c578c73e76d4" containerName="rabbitmq" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.778987 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f2d1a12-b4ce-48c2-88b0-5ae881760963" containerName="mariadb-account-create-update" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.778994 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f2d1a12-b4ce-48c2-88b0-5ae881760963" containerName="mariadb-account-create-update" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779004 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="proxy-httpd" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779010 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="proxy-httpd" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779043 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d5d490f-d968-4237-8a63-7f7d01b8708d" containerName="probe" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779050 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d5d490f-d968-4237-8a63-7f7d01b8708d" containerName="probe" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779063 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="829003dc-aa5e-43a6-a4f5-c578c73e76d4" containerName="setup-container" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779069 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="829003dc-aa5e-43a6-a4f5-c578c73e76d4" containerName="setup-container" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779076 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="ceilometer-central-agent" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779087 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="ceilometer-central-agent" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779099 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerName="nova-api-api" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779106 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerName="nova-api-api" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779115 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-log" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779121 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-log" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779130 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="ceilometer-notification-agent" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779137 5003 
state_mem.go:107] "Deleted CPUSet assignment" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="ceilometer-notification-agent" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779147 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerName="nova-api-log" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779166 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerName="nova-api-log" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779176 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d5d490f-d968-4237-8a63-7f7d01b8708d" containerName="cinder-scheduler" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779182 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d5d490f-d968-4237-8a63-7f7d01b8708d" containerName="cinder-scheduler" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779191 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerName="ovn-northd" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779198 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerName="ovn-northd" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779211 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" containerName="mysql-bootstrap" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779217 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" containerName="mysql-bootstrap" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779231 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerName="barbican-api" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779237 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerName="barbican-api" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779247 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="sg-core" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779253 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="sg-core" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779429 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerName="openstack-network-exporter" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779441 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-metadata" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779455 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerName="barbican-api-log" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779466 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f2d1a12-b4ce-48c2-88b0-5ae881760963" containerName="mariadb-account-create-update" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779474 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d5d490f-d968-4237-8a63-7f7d01b8708d" containerName="probe" Jan 04 12:13:28 crc 
kubenswrapper[5003]: I0104 12:13:28.779488 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="829003dc-aa5e-43a6-a4f5-c578c73e76d4" containerName="rabbitmq" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779497 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerName="nova-api-api" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779510 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="ceilometer-notification-agent" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779517 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="sg-core" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779529 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="proxy-httpd" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779538 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b53805c6-4e15-4580-a60d-1f0c9c1fcef6" containerName="barbican-api" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779545 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" containerName="galera" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779558 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-log" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779573 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d5d490f-d968-4237-8a63-7f7d01b8708d" containerName="cinder-scheduler" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779585 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="653b2e11-95ca-46e7-b28c-a1170d7a180b" containerName="ovn-northd" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779594 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" containerName="nova-api-log" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779607 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="597b6841-5a72-4d8d-b2a6-dec279d628d0" containerName="keystone-api" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779618 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f2d1a12-b4ce-48c2-88b0-5ae881760963" containerName="mariadb-account-create-update" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779632 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" containerName="ceilometer-central-agent" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.779806 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f2d1a12-b4ce-48c2-88b0-5ae881760963" containerName="mariadb-account-create-update" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.779817 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f2d1a12-b4ce-48c2-88b0-5ae881760963" containerName="mariadb-account-create-update" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.781516 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.798903 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h4rlr"] Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.823597 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc" path="/var/lib/kubelet/pods/0afbe6e8-b5bd-4cc7-80df-06cbc395e4bc/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.824870 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ebba05f-e935-404f-85c0-4bd28a6afd28" path="/var/lib/kubelet/pods/0ebba05f-e935-404f-85c0-4bd28a6afd28/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.825732 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="274fbbf3-b927-408e-9594-946f6ea71638" path="/var/lib/kubelet/pods/274fbbf3-b927-408e-9594-946f6ea71638/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.827096 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28122a6a-8b54-4ff3-9092-a1f7439a35cf" path="/var/lib/kubelet/pods/28122a6a-8b54-4ff3-9092-a1f7439a35cf/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.827718 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" path="/var/lib/kubelet/pods/2c899132-ee49-4ab3-89ea-95f0bfcb71ab/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.828605 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30c47e0c-622e-4f66-a71d-f7e6cc0f23d9" path="/var/lib/kubelet/pods/30c47e0c-622e-4f66-a71d-f7e6cc0f23d9/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.829992 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43c1199f-e162-4062-a972-417afa58eaa6" path="/var/lib/kubelet/pods/43c1199f-e162-4062-a972-417afa58eaa6/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.830671 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42" path="/var/lib/kubelet/pods/5c3a3b40-ff04-4d1d-87c1-0bbff9d7bc42/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.832804 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fb2bb50-645d-4cbc-973a-0f5c1847ea46" path="/var/lib/kubelet/pods/5fb2bb50-645d-4cbc-973a-0f5c1847ea46/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.833113 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d5d490f-d968-4237-8a63-7f7d01b8708d" path="/var/lib/kubelet/pods/7d5d490f-d968-4237-8a63-7f7d01b8708d/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.833637 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f2d1a12-b4ce-48c2-88b0-5ae881760963" path="/var/lib/kubelet/pods/9f2d1a12-b4ce-48c2-88b0-5ae881760963/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.834682 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" path="/var/lib/kubelet/pods/b4c84d72-3209-4925-9eb2-cbebcd1e8ae7/volumes" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.850460 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-metrics-certs-tls-certs\") pod 
\"653b2e11-95ca-46e7-b28c-a1170d7a180b\" (UID: \"653b2e11-95ca-46e7-b28c-a1170d7a180b\") " Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.851403 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-catalog-content\") pod \"redhat-marketplace-h4rlr\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.851457 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-utilities\") pod \"redhat-marketplace-h4rlr\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.851505 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppkx9\" (UniqueName: \"kubernetes.io/projected/fce6607d-86b7-4fda-8916-b05f80feb02c-kube-api-access-ppkx9\") pod \"redhat-marketplace-h4rlr\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.865162 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.868900 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "653b2e11-95ca-46e7-b28c-a1170d7a180b" (UID: "653b2e11-95ca-46e7-b28c-a1170d7a180b"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.871968 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.874082 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.874125 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="295f88c8-79ac-463f-85e3-d98dc15dd06f" containerName="nova-scheduler-scheduler" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.942190 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.948780 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.952788 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppkx9\" (UniqueName: \"kubernetes.io/projected/fce6607d-86b7-4fda-8916-b05f80feb02c-kube-api-access-ppkx9\") pod \"redhat-marketplace-h4rlr\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.952932 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-catalog-content\") pod \"redhat-marketplace-h4rlr\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.952965 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-utilities\") pod \"redhat-marketplace-h4rlr\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.953063 5003 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/653b2e11-95ca-46e7-b28c-a1170d7a180b-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.953517 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-utilities\") pod \"redhat-marketplace-h4rlr\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.953546 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-catalog-content\") pod \"redhat-marketplace-h4rlr\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.960490 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 04 12:13:28 crc kubenswrapper[5003]: E0104 12:13:28.960605 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="ff44c8db-792b-491a-879a-7e1ae7717a0f" containerName="nova-cell1-conductor-conductor" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.972344 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-85dcb76789-v5z7d" event={"ID":"597b6841-5a72-4d8d-b2a6-dec279d628d0","Type":"ContainerDied","Data":"a179d2863e7e105f110032d64f4967a2a2c5e068f56a480a1bdea2a9c9d5831f"} Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.972383 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-85dcb76789-v5z7d" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.972413 5003 scope.go:117] "RemoveContainer" containerID="f6abe0f83c7bb707281a5925c06f33a9019ba1df8b444b347dc26031613596ff" Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.988722 5003 generic.go:334] "Generic (PLEG): container finished" podID="99f5999d-5397-4e91-b56d-d0d543afc2a7" containerID="dded6312c3b00f6ad27902f106b80005667129cc552570a994c6b4381e994a6f" exitCode=0 Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.988827 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjrtq" event={"ID":"99f5999d-5397-4e91-b56d-d0d543afc2a7","Type":"ContainerDied","Data":"dded6312c3b00f6ad27902f106b80005667129cc552570a994c6b4381e994a6f"} Jan 04 12:13:28 crc kubenswrapper[5003]: I0104 12:13:28.997303 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppkx9\" (UniqueName: \"kubernetes.io/projected/fce6607d-86b7-4fda-8916-b05f80feb02c-kube-api-access-ppkx9\") pod \"redhat-marketplace-h4rlr\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.002989 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-85dcb76789-v5z7d"] Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.008590 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-85dcb76789-v5z7d"] Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.024357 5003 generic.go:334] "Generic (PLEG): container finished" podID="81193935-fcd0-4877-9d65-6155c1a888e2" containerID="f0dca325e90af7570f19f9ac0610466deeda038b06d2c3ca9f19a6c46586b480" exitCode=0 Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.024422 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"81193935-fcd0-4877-9d65-6155c1a888e2","Type":"ContainerDied","Data":"f0dca325e90af7570f19f9ac0610466deeda038b06d2c3ca9f19a6c46586b480"} Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.038649 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"829003dc-aa5e-43a6-a4f5-c578c73e76d4","Type":"ContainerDied","Data":"02b4cbf5722d9c379d7df2308f2f7645728dbe01197a0430e9ddf12ac03a3df6"} Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.038719 5003 scope.go:117] "RemoveContainer" containerID="a7db53d84b6d5b63248f6eb1e83906ab06a6912bc5b207be4b9a8cd84f1c3d9f" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.038858 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.057499 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_653b2e11-95ca-46e7-b28c-a1170d7a180b/ovn-northd/0.log" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.057556 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"653b2e11-95ca-46e7-b28c-a1170d7a180b","Type":"ContainerDied","Data":"e3265df4c8826b4c9a574f0a412029cd57e6e0130fed8c6aa5ff21f2e9f8a346"} Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.057650 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.085003 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.093647 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.094722 5003 scope.go:117] "RemoveContainer" containerID="6364cbec859dec141ca449b3d978906aef35d877a78403265122c821233736ff" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.115630 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.115852 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.125644 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.188664 5003 scope.go:117] "RemoveContainer" containerID="455bff1469c5fdbae60e33d2f9fdcc36a7531d6f5b1512eba0641e8420891546" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.299978 5003 scope.go:117] "RemoveContainer" containerID="3e03c9994a07fa7e18a42bd5c17e591979717aa51ecb5df6f1ce553e5a854fff" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.301887 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.367534 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-confd\") pod \"81193935-fcd0-4877-9d65-6155c1a888e2\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.367663 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/81193935-fcd0-4877-9d65-6155c1a888e2-pod-info\") pod \"81193935-fcd0-4877-9d65-6155c1a888e2\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.367704 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data\") pod \"81193935-fcd0-4877-9d65-6155c1a888e2\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.367741 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"81193935-fcd0-4877-9d65-6155c1a888e2\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.367772 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-tls\") pod \"81193935-fcd0-4877-9d65-6155c1a888e2\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.367817 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6t42\" (UniqueName: 
\"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-kube-api-access-x6t42\") pod \"81193935-fcd0-4877-9d65-6155c1a888e2\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.367874 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-plugins\") pod \"81193935-fcd0-4877-9d65-6155c1a888e2\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.367899 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/81193935-fcd0-4877-9d65-6155c1a888e2-erlang-cookie-secret\") pod \"81193935-fcd0-4877-9d65-6155c1a888e2\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.367930 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-plugins-conf\") pod \"81193935-fcd0-4877-9d65-6155c1a888e2\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.367989 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-server-conf\") pod \"81193935-fcd0-4877-9d65-6155c1a888e2\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.368150 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-erlang-cookie\") pod \"81193935-fcd0-4877-9d65-6155c1a888e2\" (UID: \"81193935-fcd0-4877-9d65-6155c1a888e2\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.369430 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "81193935-fcd0-4877-9d65-6155c1a888e2" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.369466 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "81193935-fcd0-4877-9d65-6155c1a888e2" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.370932 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "81193935-fcd0-4877-9d65-6155c1a888e2" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.376577 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/81193935-fcd0-4877-9d65-6155c1a888e2-pod-info" (OuterVolumeSpecName: "pod-info") pod "81193935-fcd0-4877-9d65-6155c1a888e2" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.384126 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "81193935-fcd0-4877-9d65-6155c1a888e2" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.384651 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "81193935-fcd0-4877-9d65-6155c1a888e2" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.402645 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-kube-api-access-x6t42" (OuterVolumeSpecName: "kube-api-access-x6t42") pod "81193935-fcd0-4877-9d65-6155c1a888e2" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2"). InnerVolumeSpecName "kube-api-access-x6t42". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.406740 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81193935-fcd0-4877-9d65-6155c1a888e2-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "81193935-fcd0-4877-9d65-6155c1a888e2" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.436941 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data" (OuterVolumeSpecName: "config-data") pod "81193935-fcd0-4877-9d65-6155c1a888e2" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.445699 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-server-conf" (OuterVolumeSpecName: "server-conf") pod "81193935-fcd0-4877-9d65-6155c1a888e2" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.469860 5003 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/81193935-fcd0-4877-9d65-6155c1a888e2-pod-info\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.470155 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.470298 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.470394 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.470489 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6t42\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-kube-api-access-x6t42\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.470582 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.470687 5003 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/81193935-fcd0-4877-9d65-6155c1a888e2-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.470780 5003 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.470870 5003 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/81193935-fcd0-4877-9d65-6155c1a888e2-server-conf\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.470956 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.512781 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.524115 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "81193935-fcd0-4877-9d65-6155c1a888e2" (UID: "81193935-fcd0-4877-9d65-6155c1a888e2"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.573660 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/81193935-fcd0-4877-9d65-6155c1a888e2-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.573700 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.714262 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-h4rlr"] Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.838509 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.878676 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-combined-ca-bundle\") pod \"ff44c8db-792b-491a-879a-7e1ae7717a0f\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.878727 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-config-data\") pod \"ff44c8db-792b-491a-879a-7e1ae7717a0f\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.878793 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5c8h\" (UniqueName: \"kubernetes.io/projected/ff44c8db-792b-491a-879a-7e1ae7717a0f-kube-api-access-g5c8h\") pod \"ff44c8db-792b-491a-879a-7e1ae7717a0f\" (UID: \"ff44c8db-792b-491a-879a-7e1ae7717a0f\") " Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.890573 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff44c8db-792b-491a-879a-7e1ae7717a0f-kube-api-access-g5c8h" (OuterVolumeSpecName: "kube-api-access-g5c8h") pod "ff44c8db-792b-491a-879a-7e1ae7717a0f" (UID: "ff44c8db-792b-491a-879a-7e1ae7717a0f"). InnerVolumeSpecName "kube-api-access-g5c8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.903657 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff44c8db-792b-491a-879a-7e1ae7717a0f" (UID: "ff44c8db-792b-491a-879a-7e1ae7717a0f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.917371 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-config-data" (OuterVolumeSpecName: "config-data") pod "ff44c8db-792b-491a-879a-7e1ae7717a0f" (UID: "ff44c8db-792b-491a-879a-7e1ae7717a0f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.982305 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.983114 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff44c8db-792b-491a-879a-7e1ae7717a0f-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:29 crc kubenswrapper[5003]: I0104 12:13:29.983246 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5c8h\" (UniqueName: \"kubernetes.io/projected/ff44c8db-792b-491a-879a-7e1ae7717a0f-kube-api-access-g5c8h\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.071233 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"81193935-fcd0-4877-9d65-6155c1a888e2","Type":"ContainerDied","Data":"fcc464258696e0f890aa3c6646f844036dc3b081256709d7f92e48e33af14189"} Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.071258 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.071348 5003 scope.go:117] "RemoveContainer" containerID="f0dca325e90af7570f19f9ac0610466deeda038b06d2c3ca9f19a6c46586b480" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.110298 5003 generic.go:334] "Generic (PLEG): container finished" podID="fce6607d-86b7-4fda-8916-b05f80feb02c" containerID="a5747e9cb1f505e6c3cda6a72f7e54d2fe14cd3004439607e6f9945468f29ab0" exitCode=0 Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.110402 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4rlr" event={"ID":"fce6607d-86b7-4fda-8916-b05f80feb02c","Type":"ContainerDied","Data":"a5747e9cb1f505e6c3cda6a72f7e54d2fe14cd3004439607e6f9945468f29ab0"} Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.110895 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4rlr" event={"ID":"fce6607d-86b7-4fda-8916-b05f80feb02c","Type":"ContainerStarted","Data":"091b50845e3b7b1ebb0da0139aa8e7ebaa6be89531d06b3fb803bbf6dde02e6d"} Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.113552 5003 generic.go:334] "Generic (PLEG): container finished" podID="ff44c8db-792b-491a-879a-7e1ae7717a0f" containerID="c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a" exitCode=0 Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.113672 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ff44c8db-792b-491a-879a-7e1ae7717a0f","Type":"ContainerDied","Data":"c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a"} Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.113695 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.113713 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"ff44c8db-792b-491a-879a-7e1ae7717a0f","Type":"ContainerDied","Data":"ca814a1af9bf85d5efcdb5e3da6ad02d89800fe0f39da8423f9602e50147b621"} Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.118638 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjrtq" event={"ID":"99f5999d-5397-4e91-b56d-d0d543afc2a7","Type":"ContainerStarted","Data":"b0f510e1ac9edbdad722af8e09cfe86f8a3c2a25980c59f363e03229bef3202f"} Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.122376 5003 generic.go:334] "Generic (PLEG): container finished" podID="f0dcef7e-0621-4399-b967-5d5f90dd695f" containerID="d3cbe354dea063f86f1a93a83f133720e47dd24a58d27a2d2bdf3cd839088357" exitCode=0 Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.122480 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d455558d5-f58qc" event={"ID":"f0dcef7e-0621-4399-b967-5d5f90dd695f","Type":"ContainerDied","Data":"d3cbe354dea063f86f1a93a83f133720e47dd24a58d27a2d2bdf3cd839088357"} Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.155670 5003 scope.go:117] "RemoveContainer" containerID="13206dc80be6f8795f671b42dac3396c5e445e376c8796f74fffdbfb54487a41" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.236239 5003 scope.go:117] "RemoveContainer" containerID="c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.238891 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.254951 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.390179 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.397853 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.403669 5003 scope.go:117] "RemoveContainer" containerID="c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a" Jan 04 12:13:30 crc kubenswrapper[5003]: E0104 12:13:30.405315 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a\": container with ID starting with c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a not found: ID does not exist" containerID="c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.405369 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a"} err="failed to get container status \"c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a\": rpc error: code = NotFound desc = could not find container \"c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a\": container with ID starting with c036815a0e7e6c9f9f1d469ae796fad9bf4425d1308d4bca8f8c39aa199aae5a not found: ID does not exist" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.564420 5003 
prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="2c899132-ee49-4ab3-89ea-95f0bfcb71ab" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.167:8776/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.608613 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5d455558d5-f58qc" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.691683 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.713534 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data-custom\") pod \"f0dcef7e-0621-4399-b967-5d5f90dd695f\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.713634 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data\") pod \"f0dcef7e-0621-4399-b967-5d5f90dd695f\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.713801 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0dcef7e-0621-4399-b967-5d5f90dd695f-logs\") pod \"f0dcef7e-0621-4399-b967-5d5f90dd695f\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.713824 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrkkk\" (UniqueName: \"kubernetes.io/projected/f0dcef7e-0621-4399-b967-5d5f90dd695f-kube-api-access-jrkkk\") pod \"f0dcef7e-0621-4399-b967-5d5f90dd695f\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.713871 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-combined-ca-bundle\") pod \"f0dcef7e-0621-4399-b967-5d5f90dd695f\" (UID: \"f0dcef7e-0621-4399-b967-5d5f90dd695f\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.716775 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0dcef7e-0621-4399-b967-5d5f90dd695f-logs" (OuterVolumeSpecName: "logs") pod "f0dcef7e-0621-4399-b967-5d5f90dd695f" (UID: "f0dcef7e-0621-4399-b967-5d5f90dd695f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.729239 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f0dcef7e-0621-4399-b967-5d5f90dd695f" (UID: "f0dcef7e-0621-4399-b967-5d5f90dd695f"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.729413 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0dcef7e-0621-4399-b967-5d5f90dd695f-kube-api-access-jrkkk" (OuterVolumeSpecName: "kube-api-access-jrkkk") pod "f0dcef7e-0621-4399-b967-5d5f90dd695f" (UID: "f0dcef7e-0621-4399-b967-5d5f90dd695f"). InnerVolumeSpecName "kube-api-access-jrkkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.774618 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0dcef7e-0621-4399-b967-5d5f90dd695f" (UID: "f0dcef7e-0621-4399-b967-5d5f90dd695f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.785093 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data" (OuterVolumeSpecName: "config-data") pod "f0dcef7e-0621-4399-b967-5d5f90dd695f" (UID: "f0dcef7e-0621-4399-b967-5d5f90dd695f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.787729 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.814489 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-combined-ca-bundle\") pod \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.814643 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vp28l\" (UniqueName: \"kubernetes.io/projected/a29676ba-4d56-4b2e-a92f-c83b5f25345a-kube-api-access-vp28l\") pod \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.814680 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data\") pod \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.814719 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data-custom\") pod \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.814761 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a29676ba-4d56-4b2e-a92f-c83b5f25345a-logs\") pod \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\" (UID: \"a29676ba-4d56-4b2e-a92f-c83b5f25345a\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.815122 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f0dcef7e-0621-4399-b967-5d5f90dd695f-logs\") on 
node \"crc\" DevicePath \"\"" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.815142 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrkkk\" (UniqueName: \"kubernetes.io/projected/f0dcef7e-0621-4399-b967-5d5f90dd695f-kube-api-access-jrkkk\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.815153 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.815164 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.815172 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0dcef7e-0621-4399-b967-5d5f90dd695f-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.815569 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a29676ba-4d56-4b2e-a92f-c83b5f25345a-logs" (OuterVolumeSpecName: "logs") pod "a29676ba-4d56-4b2e-a92f-c83b5f25345a" (UID: "a29676ba-4d56-4b2e-a92f-c83b5f25345a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.820140 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a29676ba-4d56-4b2e-a92f-c83b5f25345a-kube-api-access-vp28l" (OuterVolumeSpecName: "kube-api-access-vp28l") pod "a29676ba-4d56-4b2e-a92f-c83b5f25345a" (UID: "a29676ba-4d56-4b2e-a92f-c83b5f25345a"). InnerVolumeSpecName "kube-api-access-vp28l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.820310 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="597b6841-5a72-4d8d-b2a6-dec279d628d0" path="/var/lib/kubelet/pods/597b6841-5a72-4d8d-b2a6-dec279d628d0/volumes" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.821158 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="653b2e11-95ca-46e7-b28c-a1170d7a180b" path="/var/lib/kubelet/pods/653b2e11-95ca-46e7-b28c-a1170d7a180b/volumes" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.821396 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a29676ba-4d56-4b2e-a92f-c83b5f25345a" (UID: "a29676ba-4d56-4b2e-a92f-c83b5f25345a"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.822632 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81193935-fcd0-4877-9d65-6155c1a888e2" path="/var/lib/kubelet/pods/81193935-fcd0-4877-9d65-6155c1a888e2/volumes" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.823604 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="829003dc-aa5e-43a6-a4f5-c578c73e76d4" path="/var/lib/kubelet/pods/829003dc-aa5e-43a6-a4f5-c578c73e76d4/volumes" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.824181 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff44c8db-792b-491a-879a-7e1ae7717a0f" path="/var/lib/kubelet/pods/ff44c8db-792b-491a-879a-7e1ae7717a0f/volumes" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.843107 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a29676ba-4d56-4b2e-a92f-c83b5f25345a" (UID: "a29676ba-4d56-4b2e-a92f-c83b5f25345a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.883759 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data" (OuterVolumeSpecName: "config-data") pod "a29676ba-4d56-4b2e-a92f-c83b5f25345a" (UID: "a29676ba-4d56-4b2e-a92f-c83b5f25345a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.916714 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-config-data\") pod \"295f88c8-79ac-463f-85e3-d98dc15dd06f\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.917416 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js99j\" (UniqueName: \"kubernetes.io/projected/295f88c8-79ac-463f-85e3-d98dc15dd06f-kube-api-access-js99j\") pod \"295f88c8-79ac-463f-85e3-d98dc15dd06f\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.917458 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-combined-ca-bundle\") pod \"295f88c8-79ac-463f-85e3-d98dc15dd06f\" (UID: \"295f88c8-79ac-463f-85e3-d98dc15dd06f\") " Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.917975 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.918033 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vp28l\" (UniqueName: \"kubernetes.io/projected/a29676ba-4d56-4b2e-a92f-c83b5f25345a-kube-api-access-vp28l\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.918052 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data\") on node \"crc\" 
DevicePath \"\"" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.918065 5003 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a29676ba-4d56-4b2e-a92f-c83b5f25345a-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.918079 5003 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a29676ba-4d56-4b2e-a92f-c83b5f25345a-logs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.921171 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/295f88c8-79ac-463f-85e3-d98dc15dd06f-kube-api-access-js99j" (OuterVolumeSpecName: "kube-api-access-js99j") pod "295f88c8-79ac-463f-85e3-d98dc15dd06f" (UID: "295f88c8-79ac-463f-85e3-d98dc15dd06f"). InnerVolumeSpecName "kube-api-access-js99j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.943412 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "295f88c8-79ac-463f-85e3-d98dc15dd06f" (UID: "295f88c8-79ac-463f-85e3-d98dc15dd06f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:30 crc kubenswrapper[5003]: I0104 12:13:30.944138 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-config-data" (OuterVolumeSpecName: "config-data") pod "295f88c8-79ac-463f-85e3-d98dc15dd06f" (UID: "295f88c8-79ac-463f-85e3-d98dc15dd06f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.019632 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.019671 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js99j\" (UniqueName: \"kubernetes.io/projected/295f88c8-79ac-463f-85e3-d98dc15dd06f-kube-api-access-js99j\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.019682 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/295f88c8-79ac-463f-85e3-d98dc15dd06f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.019747 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.019835 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b4c84d72-3209-4925-9eb2-cbebcd1e8ae7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.207:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.145324 5003 generic.go:334] "Generic (PLEG): container finished" podID="a29676ba-4d56-4b2e-a92f-c83b5f25345a" containerID="450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4" exitCode=0 Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.145400 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t" event={"ID":"a29676ba-4d56-4b2e-a92f-c83b5f25345a","Type":"ContainerDied","Data":"450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4"} Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.145437 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t" event={"ID":"a29676ba-4d56-4b2e-a92f-c83b5f25345a","Type":"ContainerDied","Data":"2ef4f78a1357e56fbf5d7f6e592a25c5326254b62061d4c1bf5e37cf68190465"} Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.145458 5003 scope.go:117] "RemoveContainer" containerID="450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.145592 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-644b6c944d-sd84t" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.151206 5003 generic.go:334] "Generic (PLEG): container finished" podID="99f5999d-5397-4e91-b56d-d0d543afc2a7" containerID="b0f510e1ac9edbdad722af8e09cfe86f8a3c2a25980c59f363e03229bef3202f" exitCode=0 Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.151304 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjrtq" event={"ID":"99f5999d-5397-4e91-b56d-d0d543afc2a7","Type":"ContainerDied","Data":"b0f510e1ac9edbdad722af8e09cfe86f8a3c2a25980c59f363e03229bef3202f"} Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.161112 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5d455558d5-f58qc" event={"ID":"f0dcef7e-0621-4399-b967-5d5f90dd695f","Type":"ContainerDied","Data":"1aa3d5b8500289d4b0c90a30cf8bafcab60eff06444efc230bf3add825a653d0"} Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.161282 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5d455558d5-f58qc" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.168262 5003 generic.go:334] "Generic (PLEG): container finished" podID="295f88c8-79ac-463f-85e3-d98dc15dd06f" containerID="c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd" exitCode=0 Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.168396 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"295f88c8-79ac-463f-85e3-d98dc15dd06f","Type":"ContainerDied","Data":"c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd"} Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.168906 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"295f88c8-79ac-463f-85e3-d98dc15dd06f","Type":"ContainerDied","Data":"7ad93d37b4c75a2cf1e65358a55a2474bd3e083e077c4e4acb7eb1bd2fb602ab"} Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.169277 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.175722 5003 generic.go:334] "Generic (PLEG): container finished" podID="fce6607d-86b7-4fda-8916-b05f80feb02c" containerID="596bb766d1548d052c07f22535adb77c1bf8d03ed458c1ac4924721916d7f482" exitCode=0 Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.175796 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4rlr" event={"ID":"fce6607d-86b7-4fda-8916-b05f80feb02c","Type":"ContainerDied","Data":"596bb766d1548d052c07f22535adb77c1bf8d03ed458c1ac4924721916d7f482"} Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.264616 5003 scope.go:117] "RemoveContainer" containerID="f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.287846 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.300370 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.309478 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-644b6c944d-sd84t"] Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.314269 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-644b6c944d-sd84t"] Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.326584 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-5d455558d5-f58qc"] Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.328191 5003 scope.go:117] "RemoveContainer" containerID="450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4" Jan 04 12:13:31 crc kubenswrapper[5003]: E0104 12:13:31.330609 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4\": container with ID starting with 450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4 not found: ID does not exist" containerID="450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.330677 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4"} err="failed to get container status \"450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4\": rpc error: code = NotFound desc = could not find container \"450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4\": container with ID starting with 450844f0ccdffc6d7d907a51da1ae77807337385f064e077ac7579465c3daaf4 not found: ID does not exist" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.330703 5003 scope.go:117] "RemoveContainer" containerID="f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478" Jan 04 12:13:31 crc kubenswrapper[5003]: E0104 12:13:31.331249 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478\": container with ID starting with f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478 not found: ID does not exist" containerID="f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478" Jan 04 12:13:31 crc 
kubenswrapper[5003]: I0104 12:13:31.331272 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478"} err="failed to get container status \"f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478\": rpc error: code = NotFound desc = could not find container \"f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478\": container with ID starting with f25eb8eb7f0c672012e6c95abd29402fdd548aeb978e465c3f54b8838db97478 not found: ID does not exist" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.331287 5003 scope.go:117] "RemoveContainer" containerID="d3cbe354dea063f86f1a93a83f133720e47dd24a58d27a2d2bdf3cd839088357" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.336419 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-5d455558d5-f58qc"] Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.370749 5003 scope.go:117] "RemoveContainer" containerID="b3d17d863faaeeeaa347265306da2ca818931952ebab5c074c74f4eaf33efddb" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.390197 5003 scope.go:117] "RemoveContainer" containerID="c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.424295 5003 scope.go:117] "RemoveContainer" containerID="c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd" Jan 04 12:13:31 crc kubenswrapper[5003]: E0104 12:13:31.425155 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd\": container with ID starting with c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd not found: ID does not exist" containerID="c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd" Jan 04 12:13:31 crc kubenswrapper[5003]: I0104 12:13:31.425235 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd"} err="failed to get container status \"c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd\": rpc error: code = NotFound desc = could not find container \"c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd\": container with ID starting with c3b3aa191257674d28511eb56f215bdb07ddd4b9442282756b29282013c6bebd not found: ID does not exist" Jan 04 12:13:32 crc kubenswrapper[5003]: I0104 12:13:32.193328 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjrtq" event={"ID":"99f5999d-5397-4e91-b56d-d0d543afc2a7","Type":"ContainerStarted","Data":"72954ea500a29500214e5716ad06163557fa3b617473d76231c8c872d18a26b3"} Jan 04 12:13:32 crc kubenswrapper[5003]: I0104 12:13:32.202676 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4rlr" event={"ID":"fce6607d-86b7-4fda-8916-b05f80feb02c","Type":"ContainerStarted","Data":"146ebcbaff52c6f4b5bfd497decf8b6f7286837661acd250dc30f768f7e375ea"} Jan 04 12:13:32 crc kubenswrapper[5003]: I0104 12:13:32.220682 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fjrtq" podStartSLOduration=4.437396372 podStartE2EDuration="7.220658023s" podCreationTimestamp="2026-01-04 12:13:25 +0000 UTC" firstStartedPulling="2026-01-04 12:13:29.024237733 +0000 UTC m=+1524.497267574" 
lastFinishedPulling="2026-01-04 12:13:31.807499384 +0000 UTC m=+1527.280529225" observedRunningTime="2026-01-04 12:13:32.217182482 +0000 UTC m=+1527.690212353" watchObservedRunningTime="2026-01-04 12:13:32.220658023 +0000 UTC m=+1527.693687864" Jan 04 12:13:32 crc kubenswrapper[5003]: I0104 12:13:32.247655 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-h4rlr" podStartSLOduration=2.650377282 podStartE2EDuration="4.247626337s" podCreationTimestamp="2026-01-04 12:13:28 +0000 UTC" firstStartedPulling="2026-01-04 12:13:30.140658991 +0000 UTC m=+1525.613688832" lastFinishedPulling="2026-01-04 12:13:31.737908046 +0000 UTC m=+1527.210937887" observedRunningTime="2026-01-04 12:13:32.241123607 +0000 UTC m=+1527.714153448" watchObservedRunningTime="2026-01-04 12:13:32.247626337 +0000 UTC m=+1527.720656178" Jan 04 12:13:32 crc kubenswrapper[5003]: E0104 12:13:32.486849 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:32 crc kubenswrapper[5003]: E0104 12:13:32.487529 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:32 crc kubenswrapper[5003]: E0104 12:13:32.488238 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:32 crc kubenswrapper[5003]: E0104 12:13:32.488379 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:32 crc kubenswrapper[5003]: E0104 12:13:32.488421 5003 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server" Jan 04 12:13:32 crc kubenswrapper[5003]: E0104 12:13:32.492471 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" 
cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:32 crc kubenswrapper[5003]: E0104 12:13:32.496771 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:32 crc kubenswrapper[5003]: E0104 12:13:32.496824 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovs-vswitchd" Jan 04 12:13:32 crc kubenswrapper[5003]: I0104 12:13:32.815782 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="295f88c8-79ac-463f-85e3-d98dc15dd06f" path="/var/lib/kubelet/pods/295f88c8-79ac-463f-85e3-d98dc15dd06f/volumes" Jan 04 12:13:32 crc kubenswrapper[5003]: I0104 12:13:32.816526 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a29676ba-4d56-4b2e-a92f-c83b5f25345a" path="/var/lib/kubelet/pods/a29676ba-4d56-4b2e-a92f-c83b5f25345a/volumes" Jan 04 12:13:32 crc kubenswrapper[5003]: I0104 12:13:32.817155 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0dcef7e-0621-4399-b967-5d5f90dd695f" path="/var/lib/kubelet/pods/f0dcef7e-0621-4399-b967-5d5f90dd695f/volumes" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.216117 5003 generic.go:334] "Generic (PLEG): container finished" podID="23de8292-dc91-45db-8de9-59933352e3f2" containerID="3d08041741653f728a1dcd7a717b74f0e1152bf8a3fc8991a888bfea4968ac08" exitCode=0 Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.216215 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d88cc4d8f-5mhr4" event={"ID":"23de8292-dc91-45db-8de9-59933352e3f2","Type":"ContainerDied","Data":"3d08041741653f728a1dcd7a717b74f0e1152bf8a3fc8991a888bfea4968ac08"} Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.351259 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.474511 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-ovndb-tls-certs\") pod \"23de8292-dc91-45db-8de9-59933352e3f2\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.474590 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxcgv\" (UniqueName: \"kubernetes.io/projected/23de8292-dc91-45db-8de9-59933352e3f2-kube-api-access-qxcgv\") pod \"23de8292-dc91-45db-8de9-59933352e3f2\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.474668 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-config\") pod \"23de8292-dc91-45db-8de9-59933352e3f2\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.474694 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-internal-tls-certs\") pod \"23de8292-dc91-45db-8de9-59933352e3f2\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.474753 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-public-tls-certs\") pod \"23de8292-dc91-45db-8de9-59933352e3f2\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.474774 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-httpd-config\") pod \"23de8292-dc91-45db-8de9-59933352e3f2\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.474829 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-combined-ca-bundle\") pod \"23de8292-dc91-45db-8de9-59933352e3f2\" (UID: \"23de8292-dc91-45db-8de9-59933352e3f2\") " Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.482558 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "23de8292-dc91-45db-8de9-59933352e3f2" (UID: "23de8292-dc91-45db-8de9-59933352e3f2"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.512927 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23de8292-dc91-45db-8de9-59933352e3f2-kube-api-access-qxcgv" (OuterVolumeSpecName: "kube-api-access-qxcgv") pod "23de8292-dc91-45db-8de9-59933352e3f2" (UID: "23de8292-dc91-45db-8de9-59933352e3f2"). InnerVolumeSpecName "kube-api-access-qxcgv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.527698 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23de8292-dc91-45db-8de9-59933352e3f2" (UID: "23de8292-dc91-45db-8de9-59933352e3f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.533785 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-config" (OuterVolumeSpecName: "config") pod "23de8292-dc91-45db-8de9-59933352e3f2" (UID: "23de8292-dc91-45db-8de9-59933352e3f2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.546637 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "23de8292-dc91-45db-8de9-59933352e3f2" (UID: "23de8292-dc91-45db-8de9-59933352e3f2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.547736 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "23de8292-dc91-45db-8de9-59933352e3f2" (UID: "23de8292-dc91-45db-8de9-59933352e3f2"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.577462 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.577499 5003 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.577512 5003 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.577521 5003 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.577533 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.577544 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxcgv\" (UniqueName: \"kubernetes.io/projected/23de8292-dc91-45db-8de9-59933352e3f2-kube-api-access-qxcgv\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.590189 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "23de8292-dc91-45db-8de9-59933352e3f2" (UID: "23de8292-dc91-45db-8de9-59933352e3f2"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 12:13:33 crc kubenswrapper[5003]: I0104 12:13:33.679716 5003 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/23de8292-dc91-45db-8de9-59933352e3f2-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:34 crc kubenswrapper[5003]: I0104 12:13:34.246479 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7d88cc4d8f-5mhr4" event={"ID":"23de8292-dc91-45db-8de9-59933352e3f2","Type":"ContainerDied","Data":"3a5a5bf263a0abaa533ab10ae25924784d2ab8bae9187c77b0bc156a44ef4ba6"} Jan 04 12:13:34 crc kubenswrapper[5003]: I0104 12:13:34.246935 5003 scope.go:117] "RemoveContainer" containerID="312b6e09c8b9a68b781c0ec705d9f1a75feca3742a105d128ddcfa936c23e624" Jan 04 12:13:34 crc kubenswrapper[5003]: I0104 12:13:34.247210 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7d88cc4d8f-5mhr4" Jan 04 12:13:34 crc kubenswrapper[5003]: I0104 12:13:34.323903 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7d88cc4d8f-5mhr4"] Jan 04 12:13:34 crc kubenswrapper[5003]: I0104 12:13:34.331988 5003 scope.go:117] "RemoveContainer" containerID="3d08041741653f728a1dcd7a717b74f0e1152bf8a3fc8991a888bfea4968ac08" Jan 04 12:13:34 crc kubenswrapper[5003]: I0104 12:13:34.343366 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7d88cc4d8f-5mhr4"] Jan 04 12:13:34 crc kubenswrapper[5003]: I0104 12:13:34.821167 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23de8292-dc91-45db-8de9-59933352e3f2" path="/var/lib/kubelet/pods/23de8292-dc91-45db-8de9-59933352e3f2/volumes" Jan 04 12:13:36 crc kubenswrapper[5003]: I0104 12:13:36.754540 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:36 crc kubenswrapper[5003]: I0104 12:13:36.760266 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:36 crc kubenswrapper[5003]: I0104 12:13:36.829262 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:37 crc kubenswrapper[5003]: I0104 12:13:37.369941 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:37 crc kubenswrapper[5003]: E0104 12:13:37.487060 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:37 crc kubenswrapper[5003]: E0104 12:13:37.487764 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" 
containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:37 crc kubenswrapper[5003]: E0104 12:13:37.488000 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:37 crc kubenswrapper[5003]: E0104 12:13:37.489429 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:37 crc kubenswrapper[5003]: E0104 12:13:37.489778 5003 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server" Jan 04 12:13:37 crc kubenswrapper[5003]: E0104 12:13:37.490511 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:37 crc kubenswrapper[5003]: E0104 12:13:37.492493 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:37 crc kubenswrapper[5003]: E0104 12:13:37.492522 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovs-vswitchd" Jan 04 12:13:37 crc kubenswrapper[5003]: I0104 12:13:37.569813 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fjrtq"] Jan 04 12:13:39 crc kubenswrapper[5003]: I0104 12:13:39.117075 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:39 crc kubenswrapper[5003]: I0104 12:13:39.117481 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:39 crc kubenswrapper[5003]: I0104 12:13:39.167248 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:39 crc kubenswrapper[5003]: I0104 12:13:39.313958 5003 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-fjrtq" podUID="99f5999d-5397-4e91-b56d-d0d543afc2a7" containerName="registry-server" containerID="cri-o://72954ea500a29500214e5716ad06163557fa3b617473d76231c8c872d18a26b3" gracePeriod=2 Jan 04 12:13:39 crc kubenswrapper[5003]: I0104 12:13:39.370001 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:39 crc kubenswrapper[5003]: I0104 12:13:39.418615 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:13:39 crc kubenswrapper[5003]: I0104 12:13:39.418684 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:13:39 crc kubenswrapper[5003]: I0104 12:13:39.964618 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h4rlr"] Jan 04 12:13:40 crc kubenswrapper[5003]: I0104 12:13:40.328847 5003 generic.go:334] "Generic (PLEG): container finished" podID="99f5999d-5397-4e91-b56d-d0d543afc2a7" containerID="72954ea500a29500214e5716ad06163557fa3b617473d76231c8c872d18a26b3" exitCode=0 Jan 04 12:13:40 crc kubenswrapper[5003]: I0104 12:13:40.328947 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjrtq" event={"ID":"99f5999d-5397-4e91-b56d-d0d543afc2a7","Type":"ContainerDied","Data":"72954ea500a29500214e5716ad06163557fa3b617473d76231c8c872d18a26b3"} Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.119585 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.212578 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpmd4\" (UniqueName: \"kubernetes.io/projected/99f5999d-5397-4e91-b56d-d0d543afc2a7-kube-api-access-wpmd4\") pod \"99f5999d-5397-4e91-b56d-d0d543afc2a7\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.212705 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-catalog-content\") pod \"99f5999d-5397-4e91-b56d-d0d543afc2a7\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.212820 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-utilities\") pod \"99f5999d-5397-4e91-b56d-d0d543afc2a7\" (UID: \"99f5999d-5397-4e91-b56d-d0d543afc2a7\") " Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.213658 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-utilities" (OuterVolumeSpecName: "utilities") pod "99f5999d-5397-4e91-b56d-d0d543afc2a7" (UID: "99f5999d-5397-4e91-b56d-d0d543afc2a7"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.218490 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99f5999d-5397-4e91-b56d-d0d543afc2a7-kube-api-access-wpmd4" (OuterVolumeSpecName: "kube-api-access-wpmd4") pod "99f5999d-5397-4e91-b56d-d0d543afc2a7" (UID: "99f5999d-5397-4e91-b56d-d0d543afc2a7"). InnerVolumeSpecName "kube-api-access-wpmd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.265718 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99f5999d-5397-4e91-b56d-d0d543afc2a7" (UID: "99f5999d-5397-4e91-b56d-d0d543afc2a7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.314016 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.314086 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f5999d-5397-4e91-b56d-d0d543afc2a7-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.314096 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpmd4\" (UniqueName: \"kubernetes.io/projected/99f5999d-5397-4e91-b56d-d0d543afc2a7-kube-api-access-wpmd4\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.338279 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjrtq" event={"ID":"99f5999d-5397-4e91-b56d-d0d543afc2a7","Type":"ContainerDied","Data":"966807ae8519bff3763321c5298e896817952acc60ff7db8630e2f508424f490"} Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.338349 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fjrtq" Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.338350 5003 scope.go:117] "RemoveContainer" containerID="72954ea500a29500214e5716ad06163557fa3b617473d76231c8c872d18a26b3" Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.338464 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-h4rlr" podUID="fce6607d-86b7-4fda-8916-b05f80feb02c" containerName="registry-server" containerID="cri-o://146ebcbaff52c6f4b5bfd497decf8b6f7286837661acd250dc30f768f7e375ea" gracePeriod=2 Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.370400 5003 scope.go:117] "RemoveContainer" containerID="b0f510e1ac9edbdad722af8e09cfe86f8a3c2a25980c59f363e03229bef3202f" Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.384149 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fjrtq"] Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.389647 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fjrtq"] Jan 04 12:13:41 crc kubenswrapper[5003]: I0104 12:13:41.419800 5003 scope.go:117] "RemoveContainer" containerID="dded6312c3b00f6ad27902f106b80005667129cc552570a994c6b4381e994a6f" Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.352649 5003 generic.go:334] "Generic (PLEG): container finished" podID="fce6607d-86b7-4fda-8916-b05f80feb02c" containerID="146ebcbaff52c6f4b5bfd497decf8b6f7286837661acd250dc30f768f7e375ea" exitCode=0 Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.352724 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4rlr" event={"ID":"fce6607d-86b7-4fda-8916-b05f80feb02c","Type":"ContainerDied","Data":"146ebcbaff52c6f4b5bfd497decf8b6f7286837661acd250dc30f768f7e375ea"} Jan 04 12:13:42 crc kubenswrapper[5003]: E0104 12:13:42.485752 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:42 crc kubenswrapper[5003]: E0104 12:13:42.486195 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:42 crc kubenswrapper[5003]: E0104 12:13:42.486454 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:42 crc kubenswrapper[5003]: E0104 12:13:42.486493 5003 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server" Jan 04 12:13:42 crc kubenswrapper[5003]: E0104 12:13:42.487600 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:42 crc kubenswrapper[5003]: E0104 12:13:42.490470 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:42 crc kubenswrapper[5003]: E0104 12:13:42.492542 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:42 crc kubenswrapper[5003]: E0104 12:13:42.492624 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovs-vswitchd" Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.669519 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.735350 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-utilities\") pod \"fce6607d-86b7-4fda-8916-b05f80feb02c\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.735549 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-catalog-content\") pod \"fce6607d-86b7-4fda-8916-b05f80feb02c\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.735613 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppkx9\" (UniqueName: \"kubernetes.io/projected/fce6607d-86b7-4fda-8916-b05f80feb02c-kube-api-access-ppkx9\") pod \"fce6607d-86b7-4fda-8916-b05f80feb02c\" (UID: \"fce6607d-86b7-4fda-8916-b05f80feb02c\") " Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.736351 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-utilities" (OuterVolumeSpecName: "utilities") pod "fce6607d-86b7-4fda-8916-b05f80feb02c" (UID: "fce6607d-86b7-4fda-8916-b05f80feb02c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.743815 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fce6607d-86b7-4fda-8916-b05f80feb02c-kube-api-access-ppkx9" (OuterVolumeSpecName: "kube-api-access-ppkx9") pod "fce6607d-86b7-4fda-8916-b05f80feb02c" (UID: "fce6607d-86b7-4fda-8916-b05f80feb02c"). InnerVolumeSpecName "kube-api-access-ppkx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.759997 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fce6607d-86b7-4fda-8916-b05f80feb02c" (UID: "fce6607d-86b7-4fda-8916-b05f80feb02c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.814703 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99f5999d-5397-4e91-b56d-d0d543afc2a7" path="/var/lib/kubelet/pods/99f5999d-5397-4e91-b56d-d0d543afc2a7/volumes" Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.837333 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.837365 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fce6607d-86b7-4fda-8916-b05f80feb02c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:42 crc kubenswrapper[5003]: I0104 12:13:42.837377 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppkx9\" (UniqueName: \"kubernetes.io/projected/fce6607d-86b7-4fda-8916-b05f80feb02c-kube-api-access-ppkx9\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:43 crc kubenswrapper[5003]: I0104 12:13:43.369864 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-h4rlr" event={"ID":"fce6607d-86b7-4fda-8916-b05f80feb02c","Type":"ContainerDied","Data":"091b50845e3b7b1ebb0da0139aa8e7ebaa6be89531d06b3fb803bbf6dde02e6d"} Jan 04 12:13:43 crc kubenswrapper[5003]: I0104 12:13:43.370358 5003 scope.go:117] "RemoveContainer" containerID="146ebcbaff52c6f4b5bfd497decf8b6f7286837661acd250dc30f768f7e375ea" Jan 04 12:13:43 crc kubenswrapper[5003]: I0104 12:13:43.369905 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-h4rlr" Jan 04 12:13:43 crc kubenswrapper[5003]: I0104 12:13:43.410725 5003 scope.go:117] "RemoveContainer" containerID="596bb766d1548d052c07f22535adb77c1bf8d03ed458c1ac4924721916d7f482" Jan 04 12:13:43 crc kubenswrapper[5003]: I0104 12:13:43.418107 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-h4rlr"] Jan 04 12:13:43 crc kubenswrapper[5003]: I0104 12:13:43.441217 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-h4rlr"] Jan 04 12:13:43 crc kubenswrapper[5003]: I0104 12:13:43.450958 5003 scope.go:117] "RemoveContainer" containerID="a5747e9cb1f505e6c3cda6a72f7e54d2fe14cd3004439607e6f9945468f29ab0" Jan 04 12:13:44 crc kubenswrapper[5003]: I0104 12:13:44.824588 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fce6607d-86b7-4fda-8916-b05f80feb02c" path="/var/lib/kubelet/pods/fce6607d-86b7-4fda-8916-b05f80feb02c/volumes" Jan 04 12:13:47 crc kubenswrapper[5003]: E0104 12:13:47.486111 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:47 crc kubenswrapper[5003]: E0104 12:13:47.487149 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:47 crc kubenswrapper[5003]: E0104 12:13:47.487674 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 04 12:13:47 crc kubenswrapper[5003]: E0104 12:13:47.487740 5003 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server" Jan 04 12:13:47 crc kubenswrapper[5003]: E0104 12:13:47.490459 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:47 crc kubenswrapper[5003]: E0104 12:13:47.493154 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , 
exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:47 crc kubenswrapper[5003]: E0104 12:13:47.495868 5003 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 04 12:13:47 crc kubenswrapper[5003]: E0104 12:13:47.495924 5003 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-99mjg" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovs-vswitchd" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.461910 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-99mjg_d91cd6f4-0e52-4519-b337-9a7c2779b7f1/ovs-vswitchd/0.log" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.464042 5003 generic.go:334] "Generic (PLEG): container finished" podID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294" exitCode=137 Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.464113 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-99mjg" event={"ID":"d91cd6f4-0e52-4519-b337-9a7c2779b7f1","Type":"ContainerDied","Data":"dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294"} Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.464155 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-99mjg" event={"ID":"d91cd6f4-0e52-4519-b337-9a7c2779b7f1","Type":"ContainerDied","Data":"65eef8a07780b0e8e1ea149cf9c58ad22e62b06a8c2156fa8870f5b7decb667d"} Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.464172 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65eef8a07780b0e8e1ea149cf9c58ad22e62b06a8c2156fa8870f5b7decb667d" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.503488 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-99mjg_d91cd6f4-0e52-4519-b337-9a7c2779b7f1/ovs-vswitchd/0.log" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.504766 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.574477 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-log\") pod \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.574617 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-log" (OuterVolumeSpecName: "var-log") pod "d91cd6f4-0e52-4519-b337-9a7c2779b7f1" (UID: "d91cd6f4-0e52-4519-b337-9a7c2779b7f1"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.574690 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-run\") pod \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.574753 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-run" (OuterVolumeSpecName: "var-run") pod "d91cd6f4-0e52-4519-b337-9a7c2779b7f1" (UID: "d91cd6f4-0e52-4519-b337-9a7c2779b7f1"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.574763 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-scripts\") pod \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.574858 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-lib\") pod \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.574931 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-lib" (OuterVolumeSpecName: "var-lib") pod "d91cd6f4-0e52-4519-b337-9a7c2779b7f1" (UID: "d91cd6f4-0e52-4519-b337-9a7c2779b7f1"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.574973 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbjbn\" (UniqueName: \"kubernetes.io/projected/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-kube-api-access-fbjbn\") pod \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.575041 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-etc-ovs\") pod \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\" (UID: \"d91cd6f4-0e52-4519-b337-9a7c2779b7f1\") " Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.575190 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "d91cd6f4-0e52-4519-b337-9a7c2779b7f1" (UID: "d91cd6f4-0e52-4519-b337-9a7c2779b7f1"). InnerVolumeSpecName "etc-ovs". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.575448 5003 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-log\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.575482 5003 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-run\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.575498 5003 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-var-lib\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.575514 5003 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-etc-ovs\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.576159 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-scripts" (OuterVolumeSpecName: "scripts") pod "d91cd6f4-0e52-4519-b337-9a7c2779b7f1" (UID: "d91cd6f4-0e52-4519-b337-9a7c2779b7f1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.584580 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-kube-api-access-fbjbn" (OuterVolumeSpecName: "kube-api-access-fbjbn") pod "d91cd6f4-0e52-4519-b337-9a7c2779b7f1" (UID: "d91cd6f4-0e52-4519-b337-9a7c2779b7f1"). InnerVolumeSpecName "kube-api-access-fbjbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.677085 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbjbn\" (UniqueName: \"kubernetes.io/projected/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-kube-api-access-fbjbn\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:50 crc kubenswrapper[5003]: I0104 12:13:50.677128 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d91cd6f4-0e52-4519-b337-9a7c2779b7f1-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.486149 5003 generic.go:334] "Generic (PLEG): container finished" podID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerID="1ff1b1d3338eb5bfcb21f80e2c4c2e6ad020fb1d499df31a3c24c44755e46e60" exitCode=137 Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.486251 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-99mjg" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.486253 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"1ff1b1d3338eb5bfcb21f80e2c4c2e6ad020fb1d499df31a3c24c44755e46e60"} Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.509972 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-99mjg"] Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.518223 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-99mjg"] Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.602442 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.692237 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdjv8\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-kube-api-access-vdjv8\") pod \"e387635d-9ef2-4b1d-9303-0d762e8b282c\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.692552 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-cache\") pod \"e387635d-9ef2-4b1d-9303-0d762e8b282c\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.692665 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-lock\") pod \"e387635d-9ef2-4b1d-9303-0d762e8b282c\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.692872 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift\") pod \"e387635d-9ef2-4b1d-9303-0d762e8b282c\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.692938 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"e387635d-9ef2-4b1d-9303-0d762e8b282c\" (UID: \"e387635d-9ef2-4b1d-9303-0d762e8b282c\") " Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.693082 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-lock" (OuterVolumeSpecName: "lock") pod "e387635d-9ef2-4b1d-9303-0d762e8b282c" (UID: "e387635d-9ef2-4b1d-9303-0d762e8b282c"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.693354 5003 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-lock\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.693685 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-cache" (OuterVolumeSpecName: "cache") pod "e387635d-9ef2-4b1d-9303-0d762e8b282c" (UID: "e387635d-9ef2-4b1d-9303-0d762e8b282c"). 
InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.696004 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "swift") pod "e387635d-9ef2-4b1d-9303-0d762e8b282c" (UID: "e387635d-9ef2-4b1d-9303-0d762e8b282c"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.696051 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "e387635d-9ef2-4b1d-9303-0d762e8b282c" (UID: "e387635d-9ef2-4b1d-9303-0d762e8b282c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.696459 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-kube-api-access-vdjv8" (OuterVolumeSpecName: "kube-api-access-vdjv8") pod "e387635d-9ef2-4b1d-9303-0d762e8b282c" (UID: "e387635d-9ef2-4b1d-9303-0d762e8b282c"). InnerVolumeSpecName "kube-api-access-vdjv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.795164 5003 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.795211 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.795225 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdjv8\" (UniqueName: \"kubernetes.io/projected/e387635d-9ef2-4b1d-9303-0d762e8b282c-kube-api-access-vdjv8\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.795235 5003 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e387635d-9ef2-4b1d-9303-0d762e8b282c-cache\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.809309 5003 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 04 12:13:51 crc kubenswrapper[5003]: I0104 12:13:51.896568 5003 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.503290 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e387635d-9ef2-4b1d-9303-0d762e8b282c","Type":"ContainerDied","Data":"b6d95a1fc2bafb5486a724dc2fd9f03f5f51c484f76ffa2e8427f9be9b2205d8"} Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.503385 5003 scope.go:117] "RemoveContainer" containerID="1ff1b1d3338eb5bfcb21f80e2c4c2e6ad020fb1d499df31a3c24c44755e46e60" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.503514 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.527751 5003 scope.go:117] "RemoveContainer" containerID="0f945f657e49547a265f6f68bbeab5475213ad26ac43900d9363d069d96d532d" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.555633 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.557626 5003 scope.go:117] "RemoveContainer" containerID="ed59910d0aea135dbd4ee19aeda59d078e4feebb5abcaf39f16948cd769ad0c6" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.564562 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.578687 5003 scope.go:117] "RemoveContainer" containerID="10f362bf62129cfc68ea5030dc9a630a990d022be1874d460b3a1e2b97c3806b" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.598296 5003 scope.go:117] "RemoveContainer" containerID="c7da6298aedb8336c663f0f72ed02aee2693e470451ea9ed4c2506018c7c3b8b" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.613274 5003 scope.go:117] "RemoveContainer" containerID="c02d818e96b369de068228bec08edd738f084527048823b6a9f1dc73d5473513" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.628072 5003 scope.go:117] "RemoveContainer" containerID="f2aa7e67c73edf19a995e58eaf9b8785bfd532521dc609235e9c65097cf71384" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.642941 5003 scope.go:117] "RemoveContainer" containerID="95da8a5d432f4fe64060441f956785bb4f966684b7334a245346aec06e1cf140" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.658747 5003 scope.go:117] "RemoveContainer" containerID="0f48ea6f8d2e18984ca5443a03a04938c8b360cd16c1c6815b02f2fb373f0a8e" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.679874 5003 scope.go:117] "RemoveContainer" containerID="f0aaa591de01ea442a981d1ec695614335ad33f8028cf7d9ef5da12021491ff0" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.703445 5003 scope.go:117] "RemoveContainer" containerID="dc00a97e29c22bf9a2a36c4af6d0c30fdef5266c6b5c76c89cee2d2f47cd401e" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.720792 5003 scope.go:117] "RemoveContainer" containerID="707399f94576a05d1029f0ca7a930546bdc46ba6b8a66a7f7d5123ee7b10547b" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.740790 5003 scope.go:117] "RemoveContainer" containerID="8d2a2c4e9a22b4fe3fd7b40f0290eeae57fd3d5fa8a0b12d022f40ed1d9de1ab" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.759776 5003 scope.go:117] "RemoveContainer" containerID="9859f2a44803d2a1d464de1de41f2bfd77e5f0896ae37ca5e574d6ba7d0b8491" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.778335 5003 scope.go:117] "RemoveContainer" containerID="8a065f77698f6ac06fadadaa5b0b12a9e635a05f7a2fd3ab7f7457eb16357d7d" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.815908 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" path="/var/lib/kubelet/pods/d91cd6f4-0e52-4519-b337-9a7c2779b7f1/volumes" Jan 04 12:13:52 crc kubenswrapper[5003]: I0104 12:13:52.816774 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" path="/var/lib/kubelet/pods/e387635d-9ef2-4b1d-9303-0d762e8b282c/volumes" Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.418328 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.419130 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.419190 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.419864 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.419924 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821" gracePeriod=600 Jan 04 12:14:09 crc kubenswrapper[5003]: E0104 12:14:09.544805 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.709051 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821" exitCode=0 Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.709070 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"} Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.709232 5003 scope.go:117] "RemoveContainer" containerID="69783d40bb0d702bd7a771a35f8a0c04b3ee78e8c80ee725ec241cde3249b382" Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.709975 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821" Jan 04 12:14:09 crc kubenswrapper[5003]: E0104 12:14:09.710349 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" 
Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.777795 5003 scope.go:117] "RemoveContainer" containerID="dc2a9fd5a4e5efaaf4816f8b92204d68eeef36829b12562226d86ab9c2026294"
Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.802390 5003 scope.go:117] "RemoveContainer" containerID="4c0075059a61646e6cc3dee1b7df5275337889beb02e9327bdb79a1f602b18e3"
Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.833819 5003 scope.go:117] "RemoveContainer" containerID="7141902b674c0aba97218ef22d6317c2792d0dbefb479c305e2f864785706754"
Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.864354 5003 scope.go:117] "RemoveContainer" containerID="e03d33b1c2f582207ad79b8ffd1fb1bc6fb9b3a8c8fbdcaf0b8afe9429602a63"
Jan 04 12:14:09 crc kubenswrapper[5003]: I0104 12:14:09.897499 5003 scope.go:117] "RemoveContainer" containerID="2de21e3795685f92c0995b8c0275e374995fb3d1feddd1208205a49fb45022bc"
Jan 04 12:14:22 crc kubenswrapper[5003]: I0104 12:14:22.809131 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:14:22 crc kubenswrapper[5003]: E0104 12:14:22.810102 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:14:36 crc kubenswrapper[5003]: I0104 12:14:36.807613 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:14:36 crc kubenswrapper[5003]: E0104 12:14:36.808927 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:14:50 crc kubenswrapper[5003]: I0104 12:14:50.807136 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:14:50 crc kubenswrapper[5003]: E0104 12:14:50.807989 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.155918 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"]
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.156984 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-auditor"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157003 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-auditor"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157050 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-replicator"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157063 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-replicator"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157082 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-updater"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157091 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-updater"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157107 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-replicator"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157116 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-replicator"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157125 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fce6607d-86b7-4fda-8916-b05f80feb02c" containerName="extract-utilities"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157134 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fce6607d-86b7-4fda-8916-b05f80feb02c" containerName="extract-utilities"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157147 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff44c8db-792b-491a-879a-7e1ae7717a0f" containerName="nova-cell1-conductor-conductor"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157156 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff44c8db-792b-491a-879a-7e1ae7717a0f" containerName="nova-cell1-conductor-conductor"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157171 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="rsync"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157179 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="rsync"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157189 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29676ba-4d56-4b2e-a92f-c83b5f25345a" containerName="barbican-keystone-listener-log"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157198 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29676ba-4d56-4b2e-a92f-c83b5f25345a" containerName="barbican-keystone-listener-log"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157211 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-auditor"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157220 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-auditor"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157237 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-auditor"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157245 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-auditor"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157254 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="295f88c8-79ac-463f-85e3-d98dc15dd06f" containerName="nova-scheduler-scheduler"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157263 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="295f88c8-79ac-463f-85e3-d98dc15dd06f" containerName="nova-scheduler-scheduler"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157272 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29676ba-4d56-4b2e-a92f-c83b5f25345a" containerName="barbican-keystone-listener"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157280 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29676ba-4d56-4b2e-a92f-c83b5f25345a" containerName="barbican-keystone-listener"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157296 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23de8292-dc91-45db-8de9-59933352e3f2" containerName="neutron-httpd"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157304 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="23de8292-dc91-45db-8de9-59933352e3f2" containerName="neutron-httpd"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157316 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-expirer"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157326 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-expirer"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157339 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157347 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157360 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99f5999d-5397-4e91-b56d-d0d543afc2a7" containerName="extract-content"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157369 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="99f5999d-5397-4e91-b56d-d0d543afc2a7" containerName="extract-content"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157383 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99f5999d-5397-4e91-b56d-d0d543afc2a7" containerName="registry-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157396 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="99f5999d-5397-4e91-b56d-d0d543afc2a7" containerName="registry-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157409 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-reaper"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157416 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-reaper"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157430 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovs-vswitchd"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157438 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovs-vswitchd"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157449 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fce6607d-86b7-4fda-8916-b05f80feb02c" containerName="extract-content"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157459 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fce6607d-86b7-4fda-8916-b05f80feb02c" containerName="extract-content"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157470 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99f5999d-5397-4e91-b56d-d0d543afc2a7" containerName="extract-utilities"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157479 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="99f5999d-5397-4e91-b56d-d0d543afc2a7" containerName="extract-utilities"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157491 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fce6607d-86b7-4fda-8916-b05f80feb02c" containerName="registry-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157499 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fce6607d-86b7-4fda-8916-b05f80feb02c" containerName="registry-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157512 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81193935-fcd0-4877-9d65-6155c1a888e2" containerName="rabbitmq"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157521 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="81193935-fcd0-4877-9d65-6155c1a888e2" containerName="rabbitmq"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157579 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157589 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157603 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="swift-recon-cron"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157611 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="swift-recon-cron"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157631 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157640 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157652 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server-init"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157660 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server-init"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157671 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81193935-fcd0-4877-9d65-6155c1a888e2" containerName="setup-container"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157680 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="81193935-fcd0-4877-9d65-6155c1a888e2" containerName="setup-container"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157693 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-updater"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157701 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-updater"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157715 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0dcef7e-0621-4399-b967-5d5f90dd695f" containerName="barbican-worker"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157724 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0dcef7e-0621-4399-b967-5d5f90dd695f" containerName="barbican-worker"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157740 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-replicator"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157749 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-replicator"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157760 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157768 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157783 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23de8292-dc91-45db-8de9-59933352e3f2" containerName="neutron-api"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157793 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="23de8292-dc91-45db-8de9-59933352e3f2" containerName="neutron-api"
Jan 04 12:15:00 crc kubenswrapper[5003]: E0104 12:15:00.157808 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0dcef7e-0621-4399-b967-5d5f90dd695f" containerName="barbican-worker-log"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157817 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0dcef7e-0621-4399-b967-5d5f90dd695f" containerName="barbican-worker-log"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.157999 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-reaper"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158037 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-expirer"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158050 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-auditor"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158063 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-updater"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158077 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="23de8292-dc91-45db-8de9-59933352e3f2" containerName="neutron-httpd"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158087 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-replicator"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158097 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-replicator"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158105 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="295f88c8-79ac-463f-85e3-d98dc15dd06f" containerName="nova-scheduler-scheduler"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158119 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158135 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-replicator"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158144 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff44c8db-792b-491a-879a-7e1ae7717a0f" containerName="nova-cell1-conductor-conductor"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158156 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0dcef7e-0621-4399-b967-5d5f90dd695f" containerName="barbican-worker-log"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158170 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="81193935-fcd0-4877-9d65-6155c1a888e2" containerName="rabbitmq"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158179 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="swift-recon-cron"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158189 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="fce6607d-86b7-4fda-8916-b05f80feb02c" containerName="registry-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158200 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-auditor"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158210 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovsdb-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158221 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="23de8292-dc91-45db-8de9-59933352e3f2" containerName="neutron-api"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158231 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91cd6f4-0e52-4519-b337-9a7c2779b7f1" containerName="ovs-vswitchd"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158283 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="account-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158293 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="99f5999d-5397-4e91-b56d-d0d543afc2a7" containerName="registry-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158301 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="rsync"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158313 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="container-auditor"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158321 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a29676ba-4d56-4b2e-a92f-c83b5f25345a" containerName="barbican-keystone-listener-log"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158435 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a29676ba-4d56-4b2e-a92f-c83b5f25345a" containerName="barbican-keystone-listener"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158450 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-server"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158461 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e387635d-9ef2-4b1d-9303-0d762e8b282c" containerName="object-updater"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.158472 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0dcef7e-0621-4399-b967-5d5f90dd695f" containerName="barbican-worker"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.159460 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.163056 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.163097 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.171669 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"]
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.242535 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-config-volume\") pod \"collect-profiles-29458815-74tvc\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.242955 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-secret-volume\") pod \"collect-profiles-29458815-74tvc\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.243055 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t5cr\" (UniqueName: \"kubernetes.io/projected/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-kube-api-access-6t5cr\") pod \"collect-profiles-29458815-74tvc\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.344366 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t5cr\" (UniqueName: \"kubernetes.io/projected/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-kube-api-access-6t5cr\") pod \"collect-profiles-29458815-74tvc\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.344729 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-config-volume\") pod \"collect-profiles-29458815-74tvc\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.344857 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-secret-volume\") pod \"collect-profiles-29458815-74tvc\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.345859 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-config-volume\") pod \"collect-profiles-29458815-74tvc\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.351720 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-secret-volume\") pod \"collect-profiles-29458815-74tvc\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.374836 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t5cr\" (UniqueName: \"kubernetes.io/projected/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-kube-api-access-6t5cr\") pod \"collect-profiles-29458815-74tvc\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.481339 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:00 crc kubenswrapper[5003]: I0104 12:15:00.934543 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"]
Jan 04 12:15:01 crc kubenswrapper[5003]: I0104 12:15:01.296831 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc" event={"ID":"72ac7f8a-3e0b-4784-98c7-30a6a7306a77","Type":"ContainerStarted","Data":"e061e1ada410b974cc6e9261e36d1193b4ede8b9844a0fe82b4c5711247be3b1"}
Jan 04 12:15:01 crc kubenswrapper[5003]: I0104 12:15:01.298354 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc" event={"ID":"72ac7f8a-3e0b-4784-98c7-30a6a7306a77","Type":"ContainerStarted","Data":"3250cb6b905682300ed4b1a091091e8963b598b6f8ef04f48868c6a518c44f30"}
Jan 04 12:15:01 crc kubenswrapper[5003]: I0104 12:15:01.323171 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc" podStartSLOduration=1.323150692 podStartE2EDuration="1.323150692s" podCreationTimestamp="2026-01-04 12:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 12:15:01.321837258 +0000 UTC m=+1616.794867119" watchObservedRunningTime="2026-01-04 12:15:01.323150692 +0000 UTC m=+1616.796180543"
Jan 04 12:15:02 crc kubenswrapper[5003]: I0104 12:15:02.307546 5003 generic.go:334] "Generic (PLEG): container finished" podID="72ac7f8a-3e0b-4784-98c7-30a6a7306a77" containerID="e061e1ada410b974cc6e9261e36d1193b4ede8b9844a0fe82b4c5711247be3b1" exitCode=0
Jan 04 12:15:02 crc kubenswrapper[5003]: I0104 12:15:02.307898 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc" event={"ID":"72ac7f8a-3e0b-4784-98c7-30a6a7306a77","Type":"ContainerDied","Data":"e061e1ada410b974cc6e9261e36d1193b4ede8b9844a0fe82b4c5711247be3b1"}
Jan 04 12:15:02 crc kubenswrapper[5003]: I0104 12:15:02.807959 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:15:02 crc kubenswrapper[5003]: E0104 12:15:02.808218 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:15:03 crc kubenswrapper[5003]: I0104 12:15:03.610823 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:03 crc kubenswrapper[5003]: I0104 12:15:03.720322 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-config-volume\") pod \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") "
Jan 04 12:15:03 crc kubenswrapper[5003]: I0104 12:15:03.721204 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-config-volume" (OuterVolumeSpecName: "config-volume") pod "72ac7f8a-3e0b-4784-98c7-30a6a7306a77" (UID: "72ac7f8a-3e0b-4784-98c7-30a6a7306a77"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:15:03 crc kubenswrapper[5003]: I0104 12:15:03.721282 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-secret-volume\") pod \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") "
Jan 04 12:15:03 crc kubenswrapper[5003]: I0104 12:15:03.721411 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6t5cr\" (UniqueName: \"kubernetes.io/projected/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-kube-api-access-6t5cr\") pod \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\" (UID: \"72ac7f8a-3e0b-4784-98c7-30a6a7306a77\") "
Jan 04 12:15:03 crc kubenswrapper[5003]: I0104 12:15:03.722335 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-config-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:15:03 crc kubenswrapper[5003]: I0104 12:15:03.733394 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "72ac7f8a-3e0b-4784-98c7-30a6a7306a77" (UID: "72ac7f8a-3e0b-4784-98c7-30a6a7306a77"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:15:03 crc kubenswrapper[5003]: I0104 12:15:03.733664 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-kube-api-access-6t5cr" (OuterVolumeSpecName: "kube-api-access-6t5cr") pod "72ac7f8a-3e0b-4784-98c7-30a6a7306a77" (UID: "72ac7f8a-3e0b-4784-98c7-30a6a7306a77"). InnerVolumeSpecName "kube-api-access-6t5cr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:15:03 crc kubenswrapper[5003]: I0104 12:15:03.824212 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:15:03 crc kubenswrapper[5003]: I0104 12:15:03.824279 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6t5cr\" (UniqueName: \"kubernetes.io/projected/72ac7f8a-3e0b-4784-98c7-30a6a7306a77-kube-api-access-6t5cr\") on node \"crc\" DevicePath \"\""
Jan 04 12:15:04 crc kubenswrapper[5003]: I0104 12:15:04.327630 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc" event={"ID":"72ac7f8a-3e0b-4784-98c7-30a6a7306a77","Type":"ContainerDied","Data":"3250cb6b905682300ed4b1a091091e8963b598b6f8ef04f48868c6a518c44f30"}
Jan 04 12:15:04 crc kubenswrapper[5003]: I0104 12:15:04.327700 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3250cb6b905682300ed4b1a091091e8963b598b6f8ef04f48868c6a518c44f30"
Jan 04 12:15:04 crc kubenswrapper[5003]: I0104 12:15:04.327751 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.652793 5003 scope.go:117] "RemoveContainer" containerID="17b7b9bcee83dae61356b58e517a5e5f714e66ccb04fc94b80e784c8833fac4c"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.698995 5003 scope.go:117] "RemoveContainer" containerID="5865ae8b8b354c2f62178946a54a6353e637106245eee816faa47c3f85562bc5"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.719263 5003 scope.go:117] "RemoveContainer" containerID="74f56090caf846ec9a4ae90e2fdb0c91222039cc17a504b6b4f074cee442f187"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.752344 5003 scope.go:117] "RemoveContainer" containerID="d181d379a15a1197fb320c3fc361a12c4273449906a8f258ef951dd0bbad98a7"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.776944 5003 scope.go:117] "RemoveContainer" containerID="7ea838e99edb97328d68fecabf90f7f29f977b43e387a289ba0a3d36b7ff84a8"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.800927 5003 scope.go:117] "RemoveContainer" containerID="3f15ce4ee52e76ea9afea8573da314348cf1483913042535c37b49fe9a30a4de"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.831710 5003 scope.go:117] "RemoveContainer" containerID="09f7d9229fc706dc8bc36a4fe5df19bdd166223a864e15c95d9d84e6a9cbf92a"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.860940 5003 scope.go:117] "RemoveContainer" containerID="c823c59a3d914aa9851a47e8f6dff4a4dbfb9007cd526ae9c0b90ac1c3009130"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.882945 5003 scope.go:117] "RemoveContainer" containerID="ec2ccb2a7819df6465119db8e29c6514123f5c5f6fac83c9f35268a3120edafb"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.917988 5003 scope.go:117] "RemoveContainer" containerID="e81fb75cb09615116c6e41b45884e31ed54d53e90ad9d7239b96cc08c9c39dc0"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.949215 5003 scope.go:117] "RemoveContainer" containerID="518da8e529d9f48a5912c26b7dbf54ba4ec156e0b77d21fc2b8ed1024cf45d99"
Jan 04 12:15:10 crc kubenswrapper[5003]: I0104 12:15:10.981511 5003 scope.go:117] "RemoveContainer" containerID="41ebcad121f9a4824ae310763b6db5ab85e6345eca024178476fc82f67538ebe"
Jan 04 12:15:11 crc kubenswrapper[5003]: I0104 12:15:11.024969 5003 scope.go:117] "RemoveContainer" containerID="9f2a1056d6dc7247e01e260f80883de2a110e9f1decf54f7559a543884cd14d8"
Jan 04 12:15:11 crc kubenswrapper[5003]: I0104 12:15:11.054659 5003 scope.go:117] "RemoveContainer" containerID="8005247d3ecf421639fe3cce12ad4b6ea9304418c4fac60baaa23389f71401c7"
Jan 04 12:15:11 crc kubenswrapper[5003]: I0104 12:15:11.073723 5003 scope.go:117] "RemoveContainer" containerID="566da8ce745337c540dde9867ce8455893f00050b6234d3463ef536df3a979bc"
Jan 04 12:15:11 crc kubenswrapper[5003]: I0104 12:15:11.107971 5003 scope.go:117] "RemoveContainer" containerID="64406d146472e06022b356b2502a9a5cf00a7d3f689122719d04f157d1ad2893"
Jan 04 12:15:11 crc kubenswrapper[5003]: I0104 12:15:11.130694 5003 scope.go:117] "RemoveContainer" containerID="5bb101871bdcd0c3e3c7a985d0c07f6658b8187319508b5024f9eedf1dfb8867"
Jan 04 12:15:11 crc kubenswrapper[5003]: I0104 12:15:11.154928 5003 scope.go:117] "RemoveContainer" containerID="afa5dc27ad8f20e3388351b3b757361a6afaac2252ca35c4ed585e28ce8564d3"
Jan 04 12:15:16 crc kubenswrapper[5003]: I0104 12:15:16.807672 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:15:16 crc kubenswrapper[5003]: E0104 12:15:16.808377 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:15:31 crc kubenswrapper[5003]: I0104 12:15:31.807730 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:15:31 crc kubenswrapper[5003]: E0104 12:15:31.809346 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:15:46 crc kubenswrapper[5003]: I0104 12:15:46.807560 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:15:46 crc kubenswrapper[5003]: E0104 12:15:46.809118 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:16:01 crc kubenswrapper[5003]: I0104 12:16:01.806801 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:16:01 crc kubenswrapper[5003]: E0104 12:16:01.807464 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:16:11 crc kubenswrapper[5003]: I0104 12:16:11.501985 5003 scope.go:117] "RemoveContainer" containerID="7c4f0d8d5cf985bf4872ff6425b0a428694655251809cbbd7b02ebfb5aeb3a85"
Jan 04 12:16:11 crc kubenswrapper[5003]: I0104 12:16:11.534228 5003 scope.go:117] "RemoveContainer" containerID="7693732c8315b49159779b4dc18c5fe05484d703617b9730d466a74d1b0cfa18"
Jan 04 12:16:11 crc kubenswrapper[5003]: I0104 12:16:11.588830 5003 scope.go:117] "RemoveContainer" containerID="1ba8bece994985b44dcf19ff5cc432386d9da5fb3a4f2e1cfa6a0f8ee7427c14"
Jan 04 12:16:11 crc kubenswrapper[5003]: I0104 12:16:11.621368 5003 scope.go:117] "RemoveContainer" containerID="b60119d81be9fc4f99fe8bd185446cae74b10c1f7ded466e547d20803cd8d95d"
Jan 04 12:16:11 crc kubenswrapper[5003]: I0104 12:16:11.662495 5003 scope.go:117] "RemoveContainer" containerID="226223174f46da10dbf31ee136700ad731638fcc1c5e819f7db0525babaa70a9"
Jan 04 12:16:11 crc kubenswrapper[5003]: I0104 12:16:11.714219 5003 scope.go:117] "RemoveContainer" containerID="d88452cdae7133f339534ef3179cba669a43103313b0c64bdcd2e27bb4949757"
Jan 04 12:16:11 crc kubenswrapper[5003]: I0104 12:16:11.778204 5003 scope.go:117] "RemoveContainer" containerID="e1001d955f14cbfea06a5f428203b4c75630684120b8c0116be5dc27fc4cb0e6"
Jan 04 12:16:11 crc kubenswrapper[5003]: I0104 12:16:11.803536 5003 scope.go:117] "RemoveContainer" containerID="3d6ec01453584d54c02c4502ffaf6568f5d140cf80440af350ba37f5fa541403"
Jan 04 12:16:11 crc kubenswrapper[5003]: I0104 12:16:11.842888 5003 scope.go:117] "RemoveContainer" containerID="e10ea013e6db1ec608b48d073081c9a1e3b0565542f8409e8438d07885d6f975"
Jan 04 12:16:11 crc kubenswrapper[5003]: I0104 12:16:11.862340 5003 scope.go:117] "RemoveContainer" containerID="f420016660caf2d2efbf8ef172ccd25f86d71e13a069ec2cfd1f3fddb218bbeb"
Jan 04 12:16:14 crc kubenswrapper[5003]: I0104 12:16:14.813926 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:16:14 crc kubenswrapper[5003]: E0104 12:16:14.814453 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:16:27 crc kubenswrapper[5003]: I0104 12:16:27.806833 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:16:27 crc kubenswrapper[5003]: E0104 12:16:27.807518 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:16:41 crc kubenswrapper[5003]: I0104 12:16:41.808411 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:16:41 crc kubenswrapper[5003]: E0104 12:16:41.809572 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:16:52 crc kubenswrapper[5003]: I0104 12:16:52.807093 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:16:52 crc kubenswrapper[5003]: E0104 12:16:52.808232 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:17:05 crc kubenswrapper[5003]: I0104 12:17:05.806622 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:17:05 crc kubenswrapper[5003]: E0104 12:17:05.807582 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:17:11 crc kubenswrapper[5003]: I0104 12:17:11.989419 5003 scope.go:117] "RemoveContainer" containerID="221aee83cc0b9561b233e31890dcf7f9cf62d89ea0a4dba0e02cb11f163d72c5"
Jan 04 12:17:12 crc kubenswrapper[5003]: I0104 12:17:12.019249 5003 scope.go:117] "RemoveContainer" containerID="a804f524ad9a1085cf02a12ec20839c0a9904ba8eea3edaee560b0567459045c"
Jan 04 12:17:12 crc kubenswrapper[5003]: I0104 12:17:12.049841 5003 scope.go:117] "RemoveContainer" containerID="43613f0b2d9f36694ac1cf9dbb6e8fad1792484c1daf571f046d4122100383cb"
Jan 04 12:17:12 crc kubenswrapper[5003]: I0104 12:17:12.081967 5003 scope.go:117] "RemoveContainer" containerID="209b2744b2890a8d622a4e6eeef6ed901fe9df1453dae9e89661d1909b82f9c9"
Jan 04 12:17:12 crc kubenswrapper[5003]: I0104 12:17:12.108048 5003 scope.go:117] "RemoveContainer" containerID="c550f3bba36226c67dd1137b1b3ff5c808da2e64272379394c767b8103a8b79b"
Jan 04 12:17:12 crc kubenswrapper[5003]: I0104 12:17:12.137396 5003 scope.go:117] "RemoveContainer" containerID="ea94fc65feb70c04941c0d5486b524d70238803b24fc9b2c553adec3e7a57ef2"
Jan 04 12:17:12 crc kubenswrapper[5003]: I0104 12:17:12.199170 5003 scope.go:117] "RemoveContainer" containerID="9bf6d9666779e59a21fef3698f78ddafaa3c265c28b2d8fff1ebaadcb3a4f960"
Jan 04 12:17:20 crc kubenswrapper[5003]: I0104 12:17:20.808075 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:17:20 crc kubenswrapper[5003]: E0104 12:17:20.808928 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:17:31 crc kubenswrapper[5003]: I0104 12:17:31.806622 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:17:31 crc kubenswrapper[5003]: E0104 12:17:31.807509 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:17:43 crc kubenswrapper[5003]: I0104 12:17:43.806835 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:17:43 crc kubenswrapper[5003]: E0104 12:17:43.807431 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:17:54 crc kubenswrapper[5003]: I0104 12:17:54.813550 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:17:54 crc kubenswrapper[5003]: E0104 12:17:54.814264 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:18:06 crc kubenswrapper[5003]: I0104 12:18:06.809458 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821"
Jan 04 12:18:06 crc kubenswrapper[5003]: E0104 12:18:06.810159 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:18:12 crc kubenswrapper[5003]: I0104 12:18:12.352996 5003 scope.go:117] "RemoveContainer" containerID="57cbe7cb23145556ede2cb126df0b6f2b4320f9a6656889cbe5ab3eb6dd0b2cf"
Jan 04 12:18:12 crc kubenswrapper[5003]: I0104 12:18:12.416879 5003 scope.go:117] "RemoveContainer" containerID="5f0ff55a5228bd2c7cce6f8b494982689ecaf0845f8cc4b99c6855bbd52077ae"
Jan 04 12:18:12 crc kubenswrapper[5003]: I0104 12:18:12.435074 5003 scope.go:117] "RemoveContainer" containerID="43bfec4bd608685be4573015f15db1873d268c8b7296b6e047586f2093d24609"
Jan 04 12:18:12 crc kubenswrapper[5003]: I0104 12:18:12.482269 5003 scope.go:117] "RemoveContainer" 
containerID="6591fb08d275b9ad5bb9d8eaf8fa974fea5226307b28d5b2749f852c425ee738" Jan 04 12:18:17 crc kubenswrapper[5003]: I0104 12:18:17.806391 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821" Jan 04 12:18:17 crc kubenswrapper[5003]: E0104 12:18:17.806877 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:18:30 crc kubenswrapper[5003]: I0104 12:18:30.807041 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821" Jan 04 12:18:30 crc kubenswrapper[5003]: E0104 12:18:30.807766 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:18:43 crc kubenswrapper[5003]: I0104 12:18:43.806995 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821" Jan 04 12:18:43 crc kubenswrapper[5003]: E0104 12:18:43.807642 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:18:57 crc kubenswrapper[5003]: I0104 12:18:57.807774 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821" Jan 04 12:18:57 crc kubenswrapper[5003]: E0104 12:18:57.809162 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:19:10 crc kubenswrapper[5003]: I0104 12:19:10.807694 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821" Jan 04 12:19:11 crc kubenswrapper[5003]: I0104 12:19:11.732103 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"03cda3b480f1dacd648b6d7c3cd6d11635544a79c0682834bde5c1ee008085d6"} Jan 04 12:19:12 crc kubenswrapper[5003]: I0104 12:19:12.555162 5003 scope.go:117] "RemoveContainer" containerID="a36128483858c51bdf228f1c485a058ce56e3cb51fe588687795efca501f2855" Jan 04 12:19:12 crc kubenswrapper[5003]: I0104 12:19:12.593603 5003 scope.go:117] 
"RemoveContainer" containerID="0251462325e49cea189f787676ef9b788f1da3cfefece6459ad475f7a74cd5c1" Jan 04 12:19:12 crc kubenswrapper[5003]: I0104 12:19:12.630212 5003 scope.go:117] "RemoveContainer" containerID="e83484bf8f6d67f08352ddbaa45d3a145e808fc097c9292bc7185dc5bfcfc109" Jan 04 12:20:12 crc kubenswrapper[5003]: I0104 12:20:12.764142 5003 scope.go:117] "RemoveContainer" containerID="69ed4feade45a6e74281effee1a4d4fac2fb5e3c202ed2e5dd93757020739ae2" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.125253 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vqxh4"] Jan 04 12:21:20 crc kubenswrapper[5003]: E0104 12:21:20.126754 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72ac7f8a-3e0b-4784-98c7-30a6a7306a77" containerName="collect-profiles" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.126791 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="72ac7f8a-3e0b-4784-98c7-30a6a7306a77" containerName="collect-profiles" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.127156 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="72ac7f8a-3e0b-4784-98c7-30a6a7306a77" containerName="collect-profiles" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.129763 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.137290 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vqxh4"] Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.180773 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fsjt\" (UniqueName: \"kubernetes.io/projected/8209001b-d43b-4b7f-806e-2e3baaba2aba-kube-api-access-6fsjt\") pod \"redhat-operators-vqxh4\" (UID: \"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.180859 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-catalog-content\") pod \"redhat-operators-vqxh4\" (UID: \"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.180891 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-utilities\") pod \"redhat-operators-vqxh4\" (UID: \"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.282625 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fsjt\" (UniqueName: \"kubernetes.io/projected/8209001b-d43b-4b7f-806e-2e3baaba2aba-kube-api-access-6fsjt\") pod \"redhat-operators-vqxh4\" (UID: \"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.282712 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-catalog-content\") pod \"redhat-operators-vqxh4\" (UID: 
\"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.282735 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-utilities\") pod \"redhat-operators-vqxh4\" (UID: \"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.283616 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-catalog-content\") pod \"redhat-operators-vqxh4\" (UID: \"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.283686 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-utilities\") pod \"redhat-operators-vqxh4\" (UID: \"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.304154 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fsjt\" (UniqueName: \"kubernetes.io/projected/8209001b-d43b-4b7f-806e-2e3baaba2aba-kube-api-access-6fsjt\") pod \"redhat-operators-vqxh4\" (UID: \"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.453905 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:20 crc kubenswrapper[5003]: I0104 12:21:20.727577 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vqxh4"] Jan 04 12:21:20 crc kubenswrapper[5003]: W0104 12:21:20.734040 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8209001b_d43b_4b7f_806e_2e3baaba2aba.slice/crio-78b8e204419102588cda12ecc1976bf44fd589aab578a0bf6ae0f18dbfef3263 WatchSource:0}: Error finding container 78b8e204419102588cda12ecc1976bf44fd589aab578a0bf6ae0f18dbfef3263: Status 404 returned error can't find the container with id 78b8e204419102588cda12ecc1976bf44fd589aab578a0bf6ae0f18dbfef3263 Jan 04 12:21:21 crc kubenswrapper[5003]: I0104 12:21:21.016252 5003 generic.go:334] "Generic (PLEG): container finished" podID="8209001b-d43b-4b7f-806e-2e3baaba2aba" containerID="3b6eb12a8b130b0dc919f4b68a6f4b5168b44cd3d08443629d57e438d573bafd" exitCode=0 Jan 04 12:21:21 crc kubenswrapper[5003]: I0104 12:21:21.016705 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqxh4" event={"ID":"8209001b-d43b-4b7f-806e-2e3baaba2aba","Type":"ContainerDied","Data":"3b6eb12a8b130b0dc919f4b68a6f4b5168b44cd3d08443629d57e438d573bafd"} Jan 04 12:21:21 crc kubenswrapper[5003]: I0104 12:21:21.016861 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqxh4" event={"ID":"8209001b-d43b-4b7f-806e-2e3baaba2aba","Type":"ContainerStarted","Data":"78b8e204419102588cda12ecc1976bf44fd589aab578a0bf6ae0f18dbfef3263"} Jan 04 12:21:21 crc kubenswrapper[5003]: I0104 12:21:21.019360 5003 provider.go:102] Refreshing cache for provider: 
*credentialprovider.defaultDockerConfigProvider Jan 04 12:21:22 crc kubenswrapper[5003]: I0104 12:21:22.028705 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqxh4" event={"ID":"8209001b-d43b-4b7f-806e-2e3baaba2aba","Type":"ContainerStarted","Data":"d84062de1f614317b318e10f6134f67187f37473001de57473bd45b9b99063bd"} Jan 04 12:21:23 crc kubenswrapper[5003]: I0104 12:21:23.038657 5003 generic.go:334] "Generic (PLEG): container finished" podID="8209001b-d43b-4b7f-806e-2e3baaba2aba" containerID="d84062de1f614317b318e10f6134f67187f37473001de57473bd45b9b99063bd" exitCode=0 Jan 04 12:21:23 crc kubenswrapper[5003]: I0104 12:21:23.038769 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqxh4" event={"ID":"8209001b-d43b-4b7f-806e-2e3baaba2aba","Type":"ContainerDied","Data":"d84062de1f614317b318e10f6134f67187f37473001de57473bd45b9b99063bd"} Jan 04 12:21:24 crc kubenswrapper[5003]: I0104 12:21:24.068746 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqxh4" event={"ID":"8209001b-d43b-4b7f-806e-2e3baaba2aba","Type":"ContainerStarted","Data":"be73aa6ab8868be817b1c730f240b35d100121be1cc1816c9cdb0aa530ac5180"} Jan 04 12:21:30 crc kubenswrapper[5003]: I0104 12:21:30.454726 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:30 crc kubenswrapper[5003]: I0104 12:21:30.455529 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:30 crc kubenswrapper[5003]: I0104 12:21:30.498721 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:30 crc kubenswrapper[5003]: I0104 12:21:30.536470 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vqxh4" podStartSLOduration=8.107791698 podStartE2EDuration="10.536434954s" podCreationTimestamp="2026-01-04 12:21:20 +0000 UTC" firstStartedPulling="2026-01-04 12:21:21.019166115 +0000 UTC m=+1996.492195956" lastFinishedPulling="2026-01-04 12:21:23.447809371 +0000 UTC m=+1998.920839212" observedRunningTime="2026-01-04 12:21:24.096642289 +0000 UTC m=+1999.569672140" watchObservedRunningTime="2026-01-04 12:21:30.536434954 +0000 UTC m=+2006.009464805" Jan 04 12:21:31 crc kubenswrapper[5003]: I0104 12:21:31.200799 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:31 crc kubenswrapper[5003]: I0104 12:21:31.275884 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vqxh4"] Jan 04 12:21:33 crc kubenswrapper[5003]: I0104 12:21:33.150479 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vqxh4" podUID="8209001b-d43b-4b7f-806e-2e3baaba2aba" containerName="registry-server" containerID="cri-o://be73aa6ab8868be817b1c730f240b35d100121be1cc1816c9cdb0aa530ac5180" gracePeriod=2 Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.170562 5003 generic.go:334] "Generic (PLEG): container finished" podID="8209001b-d43b-4b7f-806e-2e3baaba2aba" containerID="be73aa6ab8868be817b1c730f240b35d100121be1cc1816c9cdb0aa530ac5180" exitCode=0 Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.170788 5003 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/redhat-operators-vqxh4" event={"ID":"8209001b-d43b-4b7f-806e-2e3baaba2aba","Type":"ContainerDied","Data":"be73aa6ab8868be817b1c730f240b35d100121be1cc1816c9cdb0aa530ac5180"} Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.412998 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.543445 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fsjt\" (UniqueName: \"kubernetes.io/projected/8209001b-d43b-4b7f-806e-2e3baaba2aba-kube-api-access-6fsjt\") pod \"8209001b-d43b-4b7f-806e-2e3baaba2aba\" (UID: \"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.543504 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-utilities\") pod \"8209001b-d43b-4b7f-806e-2e3baaba2aba\" (UID: \"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.543532 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-catalog-content\") pod \"8209001b-d43b-4b7f-806e-2e3baaba2aba\" (UID: \"8209001b-d43b-4b7f-806e-2e3baaba2aba\") " Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.544821 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-utilities" (OuterVolumeSpecName: "utilities") pod "8209001b-d43b-4b7f-806e-2e3baaba2aba" (UID: "8209001b-d43b-4b7f-806e-2e3baaba2aba"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.552555 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8209001b-d43b-4b7f-806e-2e3baaba2aba-kube-api-access-6fsjt" (OuterVolumeSpecName: "kube-api-access-6fsjt") pod "8209001b-d43b-4b7f-806e-2e3baaba2aba" (UID: "8209001b-d43b-4b7f-806e-2e3baaba2aba"). InnerVolumeSpecName "kube-api-access-6fsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.645735 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fsjt\" (UniqueName: \"kubernetes.io/projected/8209001b-d43b-4b7f-806e-2e3baaba2aba-kube-api-access-6fsjt\") on node \"crc\" DevicePath \"\"" Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.645770 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.702897 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8209001b-d43b-4b7f-806e-2e3baaba2aba" (UID: "8209001b-d43b-4b7f-806e-2e3baaba2aba"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:21:35 crc kubenswrapper[5003]: I0104 12:21:35.747527 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8209001b-d43b-4b7f-806e-2e3baaba2aba-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:21:36 crc kubenswrapper[5003]: I0104 12:21:36.186759 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vqxh4" event={"ID":"8209001b-d43b-4b7f-806e-2e3baaba2aba","Type":"ContainerDied","Data":"78b8e204419102588cda12ecc1976bf44fd589aab578a0bf6ae0f18dbfef3263"} Jan 04 12:21:36 crc kubenswrapper[5003]: I0104 12:21:36.186835 5003 scope.go:117] "RemoveContainer" containerID="be73aa6ab8868be817b1c730f240b35d100121be1cc1816c9cdb0aa530ac5180" Jan 04 12:21:36 crc kubenswrapper[5003]: I0104 12:21:36.187243 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vqxh4" Jan 04 12:21:36 crc kubenswrapper[5003]: I0104 12:21:36.217239 5003 scope.go:117] "RemoveContainer" containerID="d84062de1f614317b318e10f6134f67187f37473001de57473bd45b9b99063bd" Jan 04 12:21:36 crc kubenswrapper[5003]: I0104 12:21:36.258480 5003 scope.go:117] "RemoveContainer" containerID="3b6eb12a8b130b0dc919f4b68a6f4b5168b44cd3d08443629d57e438d573bafd" Jan 04 12:21:36 crc kubenswrapper[5003]: I0104 12:21:36.258657 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vqxh4"] Jan 04 12:21:36 crc kubenswrapper[5003]: I0104 12:21:36.263068 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vqxh4"] Jan 04 12:21:36 crc kubenswrapper[5003]: I0104 12:21:36.820187 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8209001b-d43b-4b7f-806e-2e3baaba2aba" path="/var/lib/kubelet/pods/8209001b-d43b-4b7f-806e-2e3baaba2aba/volumes" Jan 04 12:21:39 crc kubenswrapper[5003]: I0104 12:21:39.418613 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:21:39 crc kubenswrapper[5003]: I0104 12:21:39.419385 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:22:09 crc kubenswrapper[5003]: I0104 12:22:09.419223 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:22:09 crc kubenswrapper[5003]: I0104 12:22:09.420096 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.071955 5003 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gd98f"] Jan 04 12:22:19 crc kubenswrapper[5003]: E0104 12:22:19.072906 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8209001b-d43b-4b7f-806e-2e3baaba2aba" containerName="registry-server" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.072924 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8209001b-d43b-4b7f-806e-2e3baaba2aba" containerName="registry-server" Jan 04 12:22:19 crc kubenswrapper[5003]: E0104 12:22:19.072950 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8209001b-d43b-4b7f-806e-2e3baaba2aba" containerName="extract-content" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.072958 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8209001b-d43b-4b7f-806e-2e3baaba2aba" containerName="extract-content" Jan 04 12:22:19 crc kubenswrapper[5003]: E0104 12:22:19.072969 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8209001b-d43b-4b7f-806e-2e3baaba2aba" containerName="extract-utilities" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.072978 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8209001b-d43b-4b7f-806e-2e3baaba2aba" containerName="extract-utilities" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.073168 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8209001b-d43b-4b7f-806e-2e3baaba2aba" containerName="registry-server" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.074387 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.090044 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gd98f"] Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.205309 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qphs7\" (UniqueName: \"kubernetes.io/projected/3c290a41-6802-4b75-9162-48515d0c28a3-kube-api-access-qphs7\") pod \"certified-operators-gd98f\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.205710 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-utilities\") pod \"certified-operators-gd98f\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.205835 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-catalog-content\") pod \"certified-operators-gd98f\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.307722 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-utilities\") pod \"certified-operators-gd98f\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.307789 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-catalog-content\") pod \"certified-operators-gd98f\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.307865 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qphs7\" (UniqueName: \"kubernetes.io/projected/3c290a41-6802-4b75-9162-48515d0c28a3-kube-api-access-qphs7\") pod \"certified-operators-gd98f\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.308356 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-utilities\") pod \"certified-operators-gd98f\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.308405 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-catalog-content\") pod \"certified-operators-gd98f\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.331469 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qphs7\" (UniqueName: \"kubernetes.io/projected/3c290a41-6802-4b75-9162-48515d0c28a3-kube-api-access-qphs7\") pod \"certified-operators-gd98f\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.396398 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:19 crc kubenswrapper[5003]: I0104 12:22:19.857714 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gd98f"] Jan 04 12:22:20 crc kubenswrapper[5003]: I0104 12:22:20.592194 5003 generic.go:334] "Generic (PLEG): container finished" podID="3c290a41-6802-4b75-9162-48515d0c28a3" containerID="93c5ee1bd63f2d1563862e4982d9b41f765f3062d2967dd50fcc092c369caa95" exitCode=0 Jan 04 12:22:20 crc kubenswrapper[5003]: I0104 12:22:20.592297 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gd98f" event={"ID":"3c290a41-6802-4b75-9162-48515d0c28a3","Type":"ContainerDied","Data":"93c5ee1bd63f2d1563862e4982d9b41f765f3062d2967dd50fcc092c369caa95"} Jan 04 12:22:20 crc kubenswrapper[5003]: I0104 12:22:20.592534 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gd98f" event={"ID":"3c290a41-6802-4b75-9162-48515d0c28a3","Type":"ContainerStarted","Data":"fc91eca01daab919511a910bd48fff87c78ddd6039ba0d315e9ff42ad05aaed6"} Jan 04 12:22:21 crc kubenswrapper[5003]: I0104 12:22:21.599922 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gd98f" event={"ID":"3c290a41-6802-4b75-9162-48515d0c28a3","Type":"ContainerStarted","Data":"f090aea45a6d9a1d54863387bf3b3cb40bda2022ade5df8f4dfc894ae75d2eac"} Jan 04 12:22:22 crc kubenswrapper[5003]: I0104 12:22:22.611922 5003 generic.go:334] "Generic (PLEG): container finished" podID="3c290a41-6802-4b75-9162-48515d0c28a3" containerID="f090aea45a6d9a1d54863387bf3b3cb40bda2022ade5df8f4dfc894ae75d2eac" exitCode=0 Jan 04 12:22:22 crc kubenswrapper[5003]: I0104 12:22:22.612048 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gd98f" event={"ID":"3c290a41-6802-4b75-9162-48515d0c28a3","Type":"ContainerDied","Data":"f090aea45a6d9a1d54863387bf3b3cb40bda2022ade5df8f4dfc894ae75d2eac"} Jan 04 12:22:23 crc kubenswrapper[5003]: I0104 12:22:23.624686 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gd98f" event={"ID":"3c290a41-6802-4b75-9162-48515d0c28a3","Type":"ContainerStarted","Data":"6d53f9f173d66b08a51a48684081741507227636b0b33caf0caa17661f9ea873"} Jan 04 12:22:23 crc kubenswrapper[5003]: I0104 12:22:23.667717 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gd98f" podStartSLOduration=2.271675152 podStartE2EDuration="4.667700259s" podCreationTimestamp="2026-01-04 12:22:19 +0000 UTC" firstStartedPulling="2026-01-04 12:22:20.594459373 +0000 UTC m=+2056.067489224" lastFinishedPulling="2026-01-04 12:22:22.99048449 +0000 UTC m=+2058.463514331" observedRunningTime="2026-01-04 12:22:23.662530022 +0000 UTC m=+2059.135559873" watchObservedRunningTime="2026-01-04 12:22:23.667700259 +0000 UTC m=+2059.140730100" Jan 04 12:22:29 crc kubenswrapper[5003]: I0104 12:22:29.397134 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:29 crc kubenswrapper[5003]: I0104 12:22:29.397721 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:29 crc kubenswrapper[5003]: I0104 12:22:29.447236 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:29 crc kubenswrapper[5003]: I0104 12:22:29.714129 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:30 crc kubenswrapper[5003]: I0104 12:22:30.656955 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gd98f"] Jan 04 12:22:31 crc kubenswrapper[5003]: I0104 12:22:31.686545 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gd98f" podUID="3c290a41-6802-4b75-9162-48515d0c28a3" containerName="registry-server" containerID="cri-o://6d53f9f173d66b08a51a48684081741507227636b0b33caf0caa17661f9ea873" gracePeriod=2 Jan 04 12:22:32 crc kubenswrapper[5003]: I0104 12:22:32.697770 5003 generic.go:334] "Generic (PLEG): container finished" podID="3c290a41-6802-4b75-9162-48515d0c28a3" containerID="6d53f9f173d66b08a51a48684081741507227636b0b33caf0caa17661f9ea873" exitCode=0 Jan 04 12:22:32 crc kubenswrapper[5003]: I0104 12:22:32.697951 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gd98f" event={"ID":"3c290a41-6802-4b75-9162-48515d0c28a3","Type":"ContainerDied","Data":"6d53f9f173d66b08a51a48684081741507227636b0b33caf0caa17661f9ea873"} Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.009227 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.144523 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qphs7\" (UniqueName: \"kubernetes.io/projected/3c290a41-6802-4b75-9162-48515d0c28a3-kube-api-access-qphs7\") pod \"3c290a41-6802-4b75-9162-48515d0c28a3\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.144574 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-utilities\") pod \"3c290a41-6802-4b75-9162-48515d0c28a3\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.144643 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-catalog-content\") pod \"3c290a41-6802-4b75-9162-48515d0c28a3\" (UID: \"3c290a41-6802-4b75-9162-48515d0c28a3\") " Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.145956 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-utilities" (OuterVolumeSpecName: "utilities") pod "3c290a41-6802-4b75-9162-48515d0c28a3" (UID: "3c290a41-6802-4b75-9162-48515d0c28a3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.156206 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c290a41-6802-4b75-9162-48515d0c28a3-kube-api-access-qphs7" (OuterVolumeSpecName: "kube-api-access-qphs7") pod "3c290a41-6802-4b75-9162-48515d0c28a3" (UID: "3c290a41-6802-4b75-9162-48515d0c28a3"). InnerVolumeSpecName "kube-api-access-qphs7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.196547 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c290a41-6802-4b75-9162-48515d0c28a3" (UID: "3c290a41-6802-4b75-9162-48515d0c28a3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.245918 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.245968 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c290a41-6802-4b75-9162-48515d0c28a3-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.245979 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qphs7\" (UniqueName: \"kubernetes.io/projected/3c290a41-6802-4b75-9162-48515d0c28a3-kube-api-access-qphs7\") on node \"crc\" DevicePath \"\"" Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.713614 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gd98f" event={"ID":"3c290a41-6802-4b75-9162-48515d0c28a3","Type":"ContainerDied","Data":"fc91eca01daab919511a910bd48fff87c78ddd6039ba0d315e9ff42ad05aaed6"} Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.713671 5003 scope.go:117] "RemoveContainer" containerID="6d53f9f173d66b08a51a48684081741507227636b0b33caf0caa17661f9ea873" Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.713710 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gd98f" Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.741181 5003 scope.go:117] "RemoveContainer" containerID="f090aea45a6d9a1d54863387bf3b3cb40bda2022ade5df8f4dfc894ae75d2eac" Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.753305 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gd98f"] Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.759094 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gd98f"] Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.783351 5003 scope.go:117] "RemoveContainer" containerID="93c5ee1bd63f2d1563862e4982d9b41f765f3062d2967dd50fcc092c369caa95" Jan 04 12:22:34 crc kubenswrapper[5003]: I0104 12:22:34.814729 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c290a41-6802-4b75-9162-48515d0c28a3" path="/var/lib/kubelet/pods/3c290a41-6802-4b75-9162-48515d0c28a3/volumes" Jan 04 12:22:39 crc kubenswrapper[5003]: I0104 12:22:39.418842 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:22:39 crc kubenswrapper[5003]: I0104 12:22:39.419282 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:22:39 crc kubenswrapper[5003]: I0104 12:22:39.419381 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 12:22:39 crc kubenswrapper[5003]: I0104 12:22:39.420260 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"03cda3b480f1dacd648b6d7c3cd6d11635544a79c0682834bde5c1ee008085d6"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:22:39 crc kubenswrapper[5003]: I0104 12:22:39.420356 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://03cda3b480f1dacd648b6d7c3cd6d11635544a79c0682834bde5c1ee008085d6" gracePeriod=600 Jan 04 12:22:39 crc kubenswrapper[5003]: I0104 12:22:39.761904 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="03cda3b480f1dacd648b6d7c3cd6d11635544a79c0682834bde5c1ee008085d6" exitCode=0 Jan 04 12:22:39 crc kubenswrapper[5003]: I0104 12:22:39.762078 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"03cda3b480f1dacd648b6d7c3cd6d11635544a79c0682834bde5c1ee008085d6"} Jan 04 12:22:39 crc kubenswrapper[5003]: I0104 12:22:39.762689 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"} Jan 04 12:22:39 crc kubenswrapper[5003]: I0104 12:22:39.762734 5003 scope.go:117] "RemoveContainer" containerID="83b49e76a6bec0f9f75f0fc3c25e68790cc09e4dc277f44d33d51646ab219821" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.084646 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g74dz"] Jan 04 12:23:57 crc kubenswrapper[5003]: E0104 12:23:57.085662 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c290a41-6802-4b75-9162-48515d0c28a3" containerName="extract-content" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.085677 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c290a41-6802-4b75-9162-48515d0c28a3" containerName="extract-content" Jan 04 12:23:57 crc kubenswrapper[5003]: E0104 12:23:57.085696 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c290a41-6802-4b75-9162-48515d0c28a3" containerName="extract-utilities" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.085705 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c290a41-6802-4b75-9162-48515d0c28a3" containerName="extract-utilities" Jan 04 12:23:57 crc kubenswrapper[5003]: E0104 12:23:57.085721 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c290a41-6802-4b75-9162-48515d0c28a3" containerName="registry-server" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.085729 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c290a41-6802-4b75-9162-48515d0c28a3" containerName="registry-server" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.085900 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c290a41-6802-4b75-9162-48515d0c28a3" containerName="registry-server" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.087052 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.103355 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g74dz"] Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.127424 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-catalog-content\") pod \"community-operators-g74dz\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") " pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.127501 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-utilities\") pod \"community-operators-g74dz\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") " pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.127662 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxpll\" (UniqueName: \"kubernetes.io/projected/e427c06f-1617-45a4-a334-ae8fb888ea62-kube-api-access-rxpll\") pod \"community-operators-g74dz\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") " pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.228344 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-catalog-content\") pod \"community-operators-g74dz\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") " pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.228406 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-utilities\") pod \"community-operators-g74dz\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") " pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.228466 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxpll\" (UniqueName: \"kubernetes.io/projected/e427c06f-1617-45a4-a334-ae8fb888ea62-kube-api-access-rxpll\") pod \"community-operators-g74dz\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") " pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.229259 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-utilities\") pod \"community-operators-g74dz\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") " pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.229379 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-catalog-content\") pod \"community-operators-g74dz\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") " pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.253252 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rxpll\" (UniqueName: \"kubernetes.io/projected/e427c06f-1617-45a4-a334-ae8fb888ea62-kube-api-access-rxpll\") pod \"community-operators-g74dz\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") " pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.407505 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:23:57 crc kubenswrapper[5003]: I0104 12:23:57.752312 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g74dz"] Jan 04 12:23:58 crc kubenswrapper[5003]: I0104 12:23:58.473181 5003 generic.go:334] "Generic (PLEG): container finished" podID="e427c06f-1617-45a4-a334-ae8fb888ea62" containerID="e88360e9bb41bb8e8589af2578aa4694190156f00506ada6481518e75852786e" exitCode=0 Jan 04 12:23:58 crc kubenswrapper[5003]: I0104 12:23:58.473234 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g74dz" event={"ID":"e427c06f-1617-45a4-a334-ae8fb888ea62","Type":"ContainerDied","Data":"e88360e9bb41bb8e8589af2578aa4694190156f00506ada6481518e75852786e"} Jan 04 12:23:58 crc kubenswrapper[5003]: I0104 12:23:58.473264 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g74dz" event={"ID":"e427c06f-1617-45a4-a334-ae8fb888ea62","Type":"ContainerStarted","Data":"13b5dd33fd3e6f420a11c322abfe49ebaa00d7384532a66b8e43a0d91a95485b"} Jan 04 12:23:59 crc kubenswrapper[5003]: I0104 12:23:59.497911 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g74dz" event={"ID":"e427c06f-1617-45a4-a334-ae8fb888ea62","Type":"ContainerStarted","Data":"3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9"} Jan 04 12:24:00 crc kubenswrapper[5003]: I0104 12:24:00.517077 5003 generic.go:334] "Generic (PLEG): container finished" podID="e427c06f-1617-45a4-a334-ae8fb888ea62" containerID="3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9" exitCode=0 Jan 04 12:24:00 crc kubenswrapper[5003]: I0104 12:24:00.517224 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g74dz" event={"ID":"e427c06f-1617-45a4-a334-ae8fb888ea62","Type":"ContainerDied","Data":"3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9"} Jan 04 12:24:01 crc kubenswrapper[5003]: I0104 12:24:01.528276 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g74dz" event={"ID":"e427c06f-1617-45a4-a334-ae8fb888ea62","Type":"ContainerStarted","Data":"eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b"} Jan 04 12:24:01 crc kubenswrapper[5003]: I0104 12:24:01.554912 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g74dz" podStartSLOduration=1.921227163 podStartE2EDuration="4.55488846s" podCreationTimestamp="2026-01-04 12:23:57 +0000 UTC" firstStartedPulling="2026-01-04 12:23:58.475427948 +0000 UTC m=+2153.948457789" lastFinishedPulling="2026-01-04 12:24:01.109089195 +0000 UTC m=+2156.582119086" observedRunningTime="2026-01-04 12:24:01.548907593 +0000 UTC m=+2157.021937444" watchObservedRunningTime="2026-01-04 12:24:01.55488846 +0000 UTC m=+2157.027918291" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.056316 5003 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-sm9h6"] Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.058637 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.072398 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sm9h6"] Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.159925 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-utilities\") pod \"redhat-marketplace-sm9h6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") " pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.159986 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-catalog-content\") pod \"redhat-marketplace-sm9h6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") " pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.160114 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z5bc\" (UniqueName: \"kubernetes.io/projected/12014332-9c53-41c7-9b74-22fef422edf6-kube-api-access-2z5bc\") pod \"redhat-marketplace-sm9h6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") " pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.261431 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z5bc\" (UniqueName: \"kubernetes.io/projected/12014332-9c53-41c7-9b74-22fef422edf6-kube-api-access-2z5bc\") pod \"redhat-marketplace-sm9h6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") " pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.261548 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-utilities\") pod \"redhat-marketplace-sm9h6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") " pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.261587 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-catalog-content\") pod \"redhat-marketplace-sm9h6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") " pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.262337 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-utilities\") pod \"redhat-marketplace-sm9h6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") " pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.262386 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-catalog-content\") pod \"redhat-marketplace-sm9h6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") 
" pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.286246 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z5bc\" (UniqueName: \"kubernetes.io/projected/12014332-9c53-41c7-9b74-22fef422edf6-kube-api-access-2z5bc\") pod \"redhat-marketplace-sm9h6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") " pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.408461 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.408580 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.441086 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.471662 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.697893 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g74dz" Jan 04 12:24:07 crc kubenswrapper[5003]: I0104 12:24:07.886429 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-sm9h6"] Jan 04 12:24:08 crc kubenswrapper[5003]: I0104 12:24:08.660046 5003 generic.go:334] "Generic (PLEG): container finished" podID="12014332-9c53-41c7-9b74-22fef422edf6" containerID="e6f6e487d992edcf3ff993286bf495f0c2d8e14ced61cc6447fb0a6ae68d3deb" exitCode=0 Jan 04 12:24:08 crc kubenswrapper[5003]: I0104 12:24:08.660736 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sm9h6" event={"ID":"12014332-9c53-41c7-9b74-22fef422edf6","Type":"ContainerDied","Data":"e6f6e487d992edcf3ff993286bf495f0c2d8e14ced61cc6447fb0a6ae68d3deb"} Jan 04 12:24:08 crc kubenswrapper[5003]: I0104 12:24:08.660792 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sm9h6" event={"ID":"12014332-9c53-41c7-9b74-22fef422edf6","Type":"ContainerStarted","Data":"048eecdab7811ffd79de575428fc071687dbd8516ad8df2ee946f3615572d229"} Jan 04 12:24:08 crc kubenswrapper[5003]: I0104 12:24:08.862786 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g74dz"] Jan 04 12:24:09 crc kubenswrapper[5003]: I0104 12:24:09.669841 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g74dz" podUID="e427c06f-1617-45a4-a334-ae8fb888ea62" containerName="registry-server" containerID="cri-o://eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b" gracePeriod=2 Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.612741 5003 util.go:48] "No ready sandbox for pod can be found. 
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.679697 5003 generic.go:334] "Generic (PLEG): container finished" podID="e427c06f-1617-45a4-a334-ae8fb888ea62" containerID="eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b" exitCode=0
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.679758 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g74dz"
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.679799 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g74dz" event={"ID":"e427c06f-1617-45a4-a334-ae8fb888ea62","Type":"ContainerDied","Data":"eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b"}
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.679877 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g74dz" event={"ID":"e427c06f-1617-45a4-a334-ae8fb888ea62","Type":"ContainerDied","Data":"13b5dd33fd3e6f420a11c322abfe49ebaa00d7384532a66b8e43a0d91a95485b"}
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.679903 5003 scope.go:117] "RemoveContainer" containerID="eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b"
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.683126 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sm9h6" event={"ID":"12014332-9c53-41c7-9b74-22fef422edf6","Type":"ContainerDied","Data":"06533007f150b4fd5296eb6b302e780aef1fb120d1b5ed585c7a445c5dfb3a29"}
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.683122 5003 generic.go:334] "Generic (PLEG): container finished" podID="12014332-9c53-41c7-9b74-22fef422edf6" containerID="06533007f150b4fd5296eb6b302e780aef1fb120d1b5ed585c7a445c5dfb3a29" exitCode=0
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.710065 5003 scope.go:117] "RemoveContainer" containerID="3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9"
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.725854 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxpll\" (UniqueName: \"kubernetes.io/projected/e427c06f-1617-45a4-a334-ae8fb888ea62-kube-api-access-rxpll\") pod \"e427c06f-1617-45a4-a334-ae8fb888ea62\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") "
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.726113 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-utilities\") pod \"e427c06f-1617-45a4-a334-ae8fb888ea62\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") "
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.726175 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-catalog-content\") pod \"e427c06f-1617-45a4-a334-ae8fb888ea62\" (UID: \"e427c06f-1617-45a4-a334-ae8fb888ea62\") "
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.727328 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-utilities" (OuterVolumeSpecName: "utilities") pod "e427c06f-1617-45a4-a334-ae8fb888ea62" (UID: "e427c06f-1617-45a4-a334-ae8fb888ea62"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.734655 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e427c06f-1617-45a4-a334-ae8fb888ea62-kube-api-access-rxpll" (OuterVolumeSpecName: "kube-api-access-rxpll") pod "e427c06f-1617-45a4-a334-ae8fb888ea62" (UID: "e427c06f-1617-45a4-a334-ae8fb888ea62"). InnerVolumeSpecName "kube-api-access-rxpll". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.745454 5003 scope.go:117] "RemoveContainer" containerID="e88360e9bb41bb8e8589af2578aa4694190156f00506ada6481518e75852786e"
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.776533 5003 scope.go:117] "RemoveContainer" containerID="eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b"
Jan 04 12:24:10 crc kubenswrapper[5003]: E0104 12:24:10.777274 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b\": container with ID starting with eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b not found: ID does not exist" containerID="eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b"
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.777333 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b"} err="failed to get container status \"eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b\": rpc error: code = NotFound desc = could not find container \"eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b\": container with ID starting with eab4658136cdb89dad6debcb510010b50d7efb0b6edcf6405b469698620da20b not found: ID does not exist"
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.777371 5003 scope.go:117] "RemoveContainer" containerID="3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9"
Jan 04 12:24:10 crc kubenswrapper[5003]: E0104 12:24:10.777896 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9\": container with ID starting with 3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9 not found: ID does not exist" containerID="3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9"
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.777949 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9"} err="failed to get container status \"3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9\": rpc error: code = NotFound desc = could not find container \"3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9\": container with ID starting with 3cea31498c4ba5eff80d568305a03fb0989279b13cec7f5b9c670957e6fdb7a9 not found: ID does not exist"
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.778000 5003 scope.go:117] "RemoveContainer" containerID="e88360e9bb41bb8e8589af2578aa4694190156f00506ada6481518e75852786e"
Jan 04 12:24:10 crc kubenswrapper[5003]: E0104 12:24:10.778419 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e88360e9bb41bb8e8589af2578aa4694190156f00506ada6481518e75852786e\": container with ID starting with e88360e9bb41bb8e8589af2578aa4694190156f00506ada6481518e75852786e not found: ID does not exist" containerID="e88360e9bb41bb8e8589af2578aa4694190156f00506ada6481518e75852786e"
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.778453 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e88360e9bb41bb8e8589af2578aa4694190156f00506ada6481518e75852786e"} err="failed to get container status \"e88360e9bb41bb8e8589af2578aa4694190156f00506ada6481518e75852786e\": rpc error: code = NotFound desc = could not find container \"e88360e9bb41bb8e8589af2578aa4694190156f00506ada6481518e75852786e\": container with ID starting with e88360e9bb41bb8e8589af2578aa4694190156f00506ada6481518e75852786e not found: ID does not exist"
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.788417 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e427c06f-1617-45a4-a334-ae8fb888ea62" (UID: "e427c06f-1617-45a4-a334-ae8fb888ea62"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.827626 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.827668 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxpll\" (UniqueName: \"kubernetes.io/projected/e427c06f-1617-45a4-a334-ae8fb888ea62-kube-api-access-rxpll\") on node \"crc\" DevicePath \"\""
Jan 04 12:24:10 crc kubenswrapper[5003]: I0104 12:24:10.827682 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e427c06f-1617-45a4-a334-ae8fb888ea62-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:24:11 crc kubenswrapper[5003]: I0104 12:24:11.010640 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g74dz"]
Jan 04 12:24:11 crc kubenswrapper[5003]: I0104 12:24:11.025982 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g74dz"]
Jan 04 12:24:11 crc kubenswrapper[5003]: I0104 12:24:11.696307 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sm9h6" event={"ID":"12014332-9c53-41c7-9b74-22fef422edf6","Type":"ContainerStarted","Data":"0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69"}
Jan 04 12:24:11 crc kubenswrapper[5003]: I0104 12:24:11.723691 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-sm9h6" podStartSLOduration=2.245215618 podStartE2EDuration="4.723659757s" podCreationTimestamp="2026-01-04 12:24:07 +0000 UTC" firstStartedPulling="2026-01-04 12:24:08.664315043 +0000 UTC m=+2164.137344884" lastFinishedPulling="2026-01-04 12:24:11.142759142 +0000 UTC m=+2166.615789023" observedRunningTime="2026-01-04 12:24:11.721954622 +0000 UTC m=+2167.194984523" watchObservedRunningTime="2026-01-04 12:24:11.723659757 +0000 UTC m=+2167.196689628"
Jan 04 12:24:12 crc kubenswrapper[5003]: I0104 12:24:12.817438 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e427c06f-1617-45a4-a334-ae8fb888ea62" path="/var/lib/kubelet/pods/e427c06f-1617-45a4-a334-ae8fb888ea62/volumes"
dir" podUID="e427c06f-1617-45a4-a334-ae8fb888ea62" path="/var/lib/kubelet/pods/e427c06f-1617-45a4-a334-ae8fb888ea62/volumes" Jan 04 12:24:17 crc kubenswrapper[5003]: I0104 12:24:17.442188 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:17 crc kubenswrapper[5003]: I0104 12:24:17.442771 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:17 crc kubenswrapper[5003]: I0104 12:24:17.518703 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:17 crc kubenswrapper[5003]: I0104 12:24:17.810603 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:17 crc kubenswrapper[5003]: I0104 12:24:17.872433 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sm9h6"] Jan 04 12:24:19 crc kubenswrapper[5003]: I0104 12:24:19.762620 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-sm9h6" podUID="12014332-9c53-41c7-9b74-22fef422edf6" containerName="registry-server" containerID="cri-o://0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69" gracePeriod=2 Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.703281 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-sm9h6" Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.771383 5003 generic.go:334] "Generic (PLEG): container finished" podID="12014332-9c53-41c7-9b74-22fef422edf6" containerID="0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69" exitCode=0 Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.771436 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sm9h6" event={"ID":"12014332-9c53-41c7-9b74-22fef422edf6","Type":"ContainerDied","Data":"0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69"} Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.771466 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-sm9h6" event={"ID":"12014332-9c53-41c7-9b74-22fef422edf6","Type":"ContainerDied","Data":"048eecdab7811ffd79de575428fc071687dbd8516ad8df2ee946f3615572d229"} Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.771474 5003 util.go:48] "No ready sandbox for pod can be found. 
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.771490 5003 scope.go:117] "RemoveContainer" containerID="0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69"
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.795989 5003 scope.go:117] "RemoveContainer" containerID="06533007f150b4fd5296eb6b302e780aef1fb120d1b5ed585c7a445c5dfb3a29"
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.813554 5003 scope.go:117] "RemoveContainer" containerID="e6f6e487d992edcf3ff993286bf495f0c2d8e14ced61cc6447fb0a6ae68d3deb"
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.842551 5003 scope.go:117] "RemoveContainer" containerID="0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69"
Jan 04 12:24:20 crc kubenswrapper[5003]: E0104 12:24:20.843135 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69\": container with ID starting with 0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69 not found: ID does not exist" containerID="0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69"
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.843210 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69"} err="failed to get container status \"0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69\": rpc error: code = NotFound desc = could not find container \"0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69\": container with ID starting with 0dde723ce5d3a7d7ddc42adb98c27c8a8e1d62bcb0c6ceacebe8c5d7029cff69 not found: ID does not exist"
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.843237 5003 scope.go:117] "RemoveContainer" containerID="06533007f150b4fd5296eb6b302e780aef1fb120d1b5ed585c7a445c5dfb3a29"
Jan 04 12:24:20 crc kubenswrapper[5003]: E0104 12:24:20.843676 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06533007f150b4fd5296eb6b302e780aef1fb120d1b5ed585c7a445c5dfb3a29\": container with ID starting with 06533007f150b4fd5296eb6b302e780aef1fb120d1b5ed585c7a445c5dfb3a29 not found: ID does not exist" containerID="06533007f150b4fd5296eb6b302e780aef1fb120d1b5ed585c7a445c5dfb3a29"
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.843719 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06533007f150b4fd5296eb6b302e780aef1fb120d1b5ed585c7a445c5dfb3a29"} err="failed to get container status \"06533007f150b4fd5296eb6b302e780aef1fb120d1b5ed585c7a445c5dfb3a29\": rpc error: code = NotFound desc = could not find container \"06533007f150b4fd5296eb6b302e780aef1fb120d1b5ed585c7a445c5dfb3a29\": container with ID starting with 06533007f150b4fd5296eb6b302e780aef1fb120d1b5ed585c7a445c5dfb3a29 not found: ID does not exist"
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.843749 5003 scope.go:117] "RemoveContainer" containerID="e6f6e487d992edcf3ff993286bf495f0c2d8e14ced61cc6447fb0a6ae68d3deb"
Jan 04 12:24:20 crc kubenswrapper[5003]: E0104 12:24:20.844305 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6f6e487d992edcf3ff993286bf495f0c2d8e14ced61cc6447fb0a6ae68d3deb\": container with ID starting with e6f6e487d992edcf3ff993286bf495f0c2d8e14ced61cc6447fb0a6ae68d3deb not found: ID does not exist" containerID="e6f6e487d992edcf3ff993286bf495f0c2d8e14ced61cc6447fb0a6ae68d3deb"
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.844337 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6f6e487d992edcf3ff993286bf495f0c2d8e14ced61cc6447fb0a6ae68d3deb"} err="failed to get container status \"e6f6e487d992edcf3ff993286bf495f0c2d8e14ced61cc6447fb0a6ae68d3deb\": rpc error: code = NotFound desc = could not find container \"e6f6e487d992edcf3ff993286bf495f0c2d8e14ced61cc6447fb0a6ae68d3deb\": container with ID starting with e6f6e487d992edcf3ff993286bf495f0c2d8e14ced61cc6447fb0a6ae68d3deb not found: ID does not exist"
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.889918 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-utilities\") pod \"12014332-9c53-41c7-9b74-22fef422edf6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") "
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.889992 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2z5bc\" (UniqueName: \"kubernetes.io/projected/12014332-9c53-41c7-9b74-22fef422edf6-kube-api-access-2z5bc\") pod \"12014332-9c53-41c7-9b74-22fef422edf6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") "
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.890479 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-catalog-content\") pod \"12014332-9c53-41c7-9b74-22fef422edf6\" (UID: \"12014332-9c53-41c7-9b74-22fef422edf6\") "
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.893268 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-utilities" (OuterVolumeSpecName: "utilities") pod "12014332-9c53-41c7-9b74-22fef422edf6" (UID: "12014332-9c53-41c7-9b74-22fef422edf6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.899296 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12014332-9c53-41c7-9b74-22fef422edf6-kube-api-access-2z5bc" (OuterVolumeSpecName: "kube-api-access-2z5bc") pod "12014332-9c53-41c7-9b74-22fef422edf6" (UID: "12014332-9c53-41c7-9b74-22fef422edf6"). InnerVolumeSpecName "kube-api-access-2z5bc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.928510 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12014332-9c53-41c7-9b74-22fef422edf6" (UID: "12014332-9c53-41c7-9b74-22fef422edf6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.992843 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2z5bc\" (UniqueName: \"kubernetes.io/projected/12014332-9c53-41c7-9b74-22fef422edf6-kube-api-access-2z5bc\") on node \"crc\" DevicePath \"\""
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.992890 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:24:20 crc kubenswrapper[5003]: I0104 12:24:20.992911 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12014332-9c53-41c7-9b74-22fef422edf6-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:24:21 crc kubenswrapper[5003]: I0104 12:24:21.132939 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-sm9h6"]
Jan 04 12:24:21 crc kubenswrapper[5003]: I0104 12:24:21.141962 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-sm9h6"]
Jan 04 12:24:22 crc kubenswrapper[5003]: I0104 12:24:22.827677 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12014332-9c53-41c7-9b74-22fef422edf6" path="/var/lib/kubelet/pods/12014332-9c53-41c7-9b74-22fef422edf6/volumes"
Jan 04 12:24:39 crc kubenswrapper[5003]: I0104 12:24:39.418580 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:24:39 crc kubenswrapper[5003]: I0104 12:24:39.419149 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:25:09 crc kubenswrapper[5003]: I0104 12:25:09.419224 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:25:09 crc kubenswrapper[5003]: I0104 12:25:09.421368 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:25:39 crc kubenswrapper[5003]: I0104 12:25:39.418495 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:25:39 crc kubenswrapper[5003]: I0104 12:25:39.419458 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
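[editor's note] The three "Probe failed" pairs above show the kubelet's HTTP liveness probe against the machine-config-daemon's health endpoint being refused outright: nothing is listening on 127.0.0.1:8798. An equivalent check, as a hedged sketch (the URL is read off the log; the 1 s timeout is my assumption, since the real probe parameters live in the DaemonSet spec, which isn't shown here):

    import urllib.request
    import urllib.error

    def probe(url: str = "http://127.0.0.1:8798/health", timeout: float = 1.0) -> bool:
        """Rough stand-in for the kubelet's HTTP GET liveness probe."""
        try:
            with urllib.request.urlopen(url, timeout=timeout) as resp:
                return 200 <= resp.status < 400   # kubelet treats 2xx/3xx as success
        except (urllib.error.URLError, OSError):
            return False  # e.g. "connect: connection refused", as in the log

    if __name__ == "__main__":
        print("healthy" if probe() else "unhealthy")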
Jan 04 12:25:39 crc kubenswrapper[5003]: I0104 12:25:39.419530 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp"
Jan 04 12:25:39 crc kubenswrapper[5003]: I0104 12:25:39.420413 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 04 12:25:39 crc kubenswrapper[5003]: I0104 12:25:39.420483 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2" gracePeriod=600
Jan 04 12:25:39 crc kubenswrapper[5003]: E0104 12:25:39.563644 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:25:40 crc kubenswrapper[5003]: I0104 12:25:40.517106 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2" exitCode=0
Jan 04 12:25:40 crc kubenswrapper[5003]: I0104 12:25:40.517216 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"}
Jan 04 12:25:40 crc kubenswrapper[5003]: I0104 12:25:40.517295 5003 scope.go:117] "RemoveContainer" containerID="03cda3b480f1dacd648b6d7c3cd6d11635544a79c0682834bde5c1ee008085d6"
Jan 04 12:25:40 crc kubenswrapper[5003]: I0104 12:25:40.518695 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:25:40 crc kubenswrapper[5003]: E0104 12:25:40.519393 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:25:52 crc kubenswrapper[5003]: I0104 12:25:52.807281 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:25:52 crc kubenswrapper[5003]: E0104 12:25:52.808525 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
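[editor's note] From here the pod sits in CrashLoopBackOff: every sync attempt logs a "RemoveContainer" for the dead container followed by the same "back-off 5m0s" error. The kubelet's restart back-off, as documented for upstream Kubernetes (starting around 10 s, doubling per restart, capped at 5 m, reset after 10 min of stable running), reaches the cap quickly, which is why the message pins at 5m0s for the next several minutes. A sketch of that schedule, assuming those upstream defaults rather than anything read from this cluster's config:

    # Assumed upstream kubelet defaults; not read from this cluster's config.
    INITIAL = 10.0      # seconds
    FACTOR = 2.0
    CAP = 300.0         # "back-off 5m0s" in the log

    def backoff_schedule(restarts: int):
        """Delay before each of the first `restarts` restart attempts."""
        delay = INITIAL
        for _ in range(restarts):
            yield min(delay, CAP)
            delay *= FACTOR

    print(list(backoff_schedule(7)))
    # [10.0, 20.0, 40.0, 80.0, 160.0, 300.0, 300.0] -- at the cap by the sixth restart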
Jan 04 12:26:06 crc kubenswrapper[5003]: I0104 12:26:06.807256 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:26:06 crc kubenswrapper[5003]: E0104 12:26:06.808163 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:26:20 crc kubenswrapper[5003]: I0104 12:26:20.807540 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:26:20 crc kubenswrapper[5003]: E0104 12:26:20.808526 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:26:33 crc kubenswrapper[5003]: I0104 12:26:33.807619 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:26:33 crc kubenswrapper[5003]: E0104 12:26:33.808759 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:26:47 crc kubenswrapper[5003]: I0104 12:26:47.806708 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:26:47 crc kubenswrapper[5003]: E0104 12:26:47.810161 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:27:02 crc kubenswrapper[5003]: I0104 12:27:02.807860 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:27:02 crc kubenswrapper[5003]: E0104 12:27:02.809471 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:27:15 crc kubenswrapper[5003]: I0104 12:27:15.806718 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:27:15 crc kubenswrapper[5003]: E0104 12:27:15.807740 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:27:29 crc kubenswrapper[5003]: I0104 12:27:29.806341 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:27:29 crc kubenswrapper[5003]: E0104 12:27:29.807321 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:27:42 crc kubenswrapper[5003]: I0104 12:27:42.806436 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:27:42 crc kubenswrapper[5003]: E0104 12:27:42.807170 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:27:55 crc kubenswrapper[5003]: I0104 12:27:55.807463 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:27:55 crc kubenswrapper[5003]: E0104 12:27:55.808356 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:28:07 crc kubenswrapper[5003]: I0104 12:28:07.806694 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:28:07 crc kubenswrapper[5003]: E0104 12:28:07.807446 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:28:20 crc kubenswrapper[5003]: I0104 12:28:20.807109 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:28:20 crc kubenswrapper[5003]: E0104 12:28:20.808286 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:28:31 crc kubenswrapper[5003]: I0104 12:28:31.806977 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:28:31 crc kubenswrapper[5003]: E0104 12:28:31.808208 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:28:42 crc kubenswrapper[5003]: I0104 12:28:42.807379 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:28:42 crc kubenswrapper[5003]: E0104 12:28:42.808455 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:28:53 crc kubenswrapper[5003]: I0104 12:28:53.806592 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:28:53 crc kubenswrapper[5003]: E0104 12:28:53.808497 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:29:06 crc kubenswrapper[5003]: I0104 12:29:06.807268 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:29:06 crc kubenswrapper[5003]: E0104 12:29:06.808329 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:29:19 crc kubenswrapper[5003]: I0104 12:29:19.807518 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:29:19 crc kubenswrapper[5003]: E0104 12:29:19.808989 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:29:31 crc kubenswrapper[5003]: I0104 12:29:31.806867 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:29:31 crc kubenswrapper[5003]: E0104 12:29:31.808077 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:29:43 crc kubenswrapper[5003]: I0104 12:29:43.808250 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:29:43 crc kubenswrapper[5003]: E0104 12:29:43.809980 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:29:57 crc kubenswrapper[5003]: I0104 12:29:57.808324 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:29:57 crc kubenswrapper[5003]: E0104 12:29:57.810148 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.160634 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"]
Jan 04 12:30:00 crc kubenswrapper[5003]: E0104 12:30:00.161517 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12014332-9c53-41c7-9b74-22fef422edf6" containerName="extract-content"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.161541 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="12014332-9c53-41c7-9b74-22fef422edf6" containerName="extract-content"
Jan 04 12:30:00 crc kubenswrapper[5003]: E0104 12:30:00.161568 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12014332-9c53-41c7-9b74-22fef422edf6" containerName="extract-utilities"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.161580 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="12014332-9c53-41c7-9b74-22fef422edf6" containerName="extract-utilities"
Jan 04 12:30:00 crc kubenswrapper[5003]: E0104 12:30:00.161597 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e427c06f-1617-45a4-a334-ae8fb888ea62" containerName="extract-content"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.161618 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e427c06f-1617-45a4-a334-ae8fb888ea62" containerName="extract-content"
Jan 04 12:30:00 crc kubenswrapper[5003]: E0104 12:30:00.161656 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e427c06f-1617-45a4-a334-ae8fb888ea62" containerName="registry-server"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.161675 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e427c06f-1617-45a4-a334-ae8fb888ea62" containerName="registry-server"
Jan 04 12:30:00 crc kubenswrapper[5003]: E0104 12:30:00.161709 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12014332-9c53-41c7-9b74-22fef422edf6" containerName="registry-server"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.161724 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="12014332-9c53-41c7-9b74-22fef422edf6" containerName="registry-server"
Jan 04 12:30:00 crc kubenswrapper[5003]: E0104 12:30:00.161750 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e427c06f-1617-45a4-a334-ae8fb888ea62" containerName="extract-utilities"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.161767 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e427c06f-1617-45a4-a334-ae8fb888ea62" containerName="extract-utilities"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.162130 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="12014332-9c53-41c7-9b74-22fef422edf6" containerName="registry-server"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.162159 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e427c06f-1617-45a4-a334-ae8fb888ea62" containerName="registry-server"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.163085 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.166135 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.166887 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.177705 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"]
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.280672 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj7ts\" (UniqueName: \"kubernetes.io/projected/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-kube-api-access-bj7ts\") pod \"collect-profiles-29458830-x5wfl\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.281112 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-config-volume\") pod \"collect-profiles-29458830-x5wfl\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.281183 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-secret-volume\") pod \"collect-profiles-29458830-x5wfl\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.382262 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-config-volume\") pod \"collect-profiles-29458830-x5wfl\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.382307 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-secret-volume\") pod \"collect-profiles-29458830-x5wfl\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.382351 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj7ts\" (UniqueName: \"kubernetes.io/projected/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-kube-api-access-bj7ts\") pod \"collect-profiles-29458830-x5wfl\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.383600 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-config-volume\") pod \"collect-profiles-29458830-x5wfl\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.393097 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-secret-volume\") pod \"collect-profiles-29458830-x5wfl\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.404367 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj7ts\" (UniqueName: \"kubernetes.io/projected/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-kube-api-access-bj7ts\") pod \"collect-profiles-29458830-x5wfl\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.492255 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.918351 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"]
Jan 04 12:30:00 crc kubenswrapper[5003]: I0104 12:30:00.936100 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl" event={"ID":"6e311c4b-0ab7-4985-bd09-7da18cd2b17c","Type":"ContainerStarted","Data":"fddd18e1fcdd23440c0f5a79e062adf4bea39aa0c3ceded64f37730834f06293"}
Jan 04 12:30:01 crc kubenswrapper[5003]: I0104 12:30:01.949923 5003 generic.go:334] "Generic (PLEG): container finished" podID="6e311c4b-0ab7-4985-bd09-7da18cd2b17c" containerID="a453779df5529599191a566b7afd4d90c151af89bc4daa9be2cc1251b7fec7c7" exitCode=0
Jan 04 12:30:01 crc kubenswrapper[5003]: I0104 12:30:01.949997 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl" event={"ID":"6e311c4b-0ab7-4985-bd09-7da18cd2b17c","Type":"ContainerDied","Data":"a453779df5529599191a566b7afd4d90c151af89bc4daa9be2cc1251b7fec7c7"}
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.260402 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.323301 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-secret-volume\") pod \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") "
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.323374 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-config-volume\") pod \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") "
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.323418 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bj7ts\" (UniqueName: \"kubernetes.io/projected/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-kube-api-access-bj7ts\") pod \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\" (UID: \"6e311c4b-0ab7-4985-bd09-7da18cd2b17c\") "
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.326660 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-config-volume" (OuterVolumeSpecName: "config-volume") pod "6e311c4b-0ab7-4985-bd09-7da18cd2b17c" (UID: "6e311c4b-0ab7-4985-bd09-7da18cd2b17c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.329665 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-kube-api-access-bj7ts" (OuterVolumeSpecName: "kube-api-access-bj7ts") pod "6e311c4b-0ab7-4985-bd09-7da18cd2b17c" (UID: "6e311c4b-0ab7-4985-bd09-7da18cd2b17c"). InnerVolumeSpecName "kube-api-access-bj7ts". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.329963 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6e311c4b-0ab7-4985-bd09-7da18cd2b17c" (UID: "6e311c4b-0ab7-4985-bd09-7da18cd2b17c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.425607 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-config-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.425647 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bj7ts\" (UniqueName: \"kubernetes.io/projected/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-kube-api-access-bj7ts\") on node \"crc\" DevicePath \"\""
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.425660 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6e311c4b-0ab7-4985-bd09-7da18cd2b17c-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.965003 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl" event={"ID":"6e311c4b-0ab7-4985-bd09-7da18cd2b17c","Type":"ContainerDied","Data":"fddd18e1fcdd23440c0f5a79e062adf4bea39aa0c3ceded64f37730834f06293"}
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.965068 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fddd18e1fcdd23440c0f5a79e062adf4bea39aa0c3ceded64f37730834f06293"
Jan 04 12:30:03 crc kubenswrapper[5003]: I0104 12:30:03.965386 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"
Jan 04 12:30:04 crc kubenswrapper[5003]: I0104 12:30:04.354987 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v"]
Jan 04 12:30:04 crc kubenswrapper[5003]: I0104 12:30:04.360571 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458785-q4d9v"]
Jan 04 12:30:04 crc kubenswrapper[5003]: I0104 12:30:04.816990 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1886cfcf-db6d-49b2-8f0a-4637996373db" path="/var/lib/kubelet/pods/1886cfcf-db6d-49b2-8f0a-4637996373db/volumes"
Jan 04 12:30:11 crc kubenswrapper[5003]: I0104 12:30:11.807285 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:30:11 crc kubenswrapper[5003]: E0104 12:30:11.808729 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:30:13 crc kubenswrapper[5003]: I0104 12:30:13.053560 5003 scope.go:117] "RemoveContainer" containerID="00707828a2f582e1f0ffd5a56dba21044e3b453f514e280d8f8df9f88e721644"
Jan 04 12:30:25 crc kubenswrapper[5003]: I0104 12:30:25.807474 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:30:25 crc kubenswrapper[5003]: E0104 12:30:25.808953 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:30:36 crc kubenswrapper[5003]: I0104 12:30:36.807452 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:30:36 crc kubenswrapper[5003]: E0104 12:30:36.808764 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:30:51 crc kubenswrapper[5003]: I0104 12:30:51.807900 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2"
Jan 04 12:30:52 crc kubenswrapper[5003]: I0104 12:30:52.458192 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"eaf0e8ffbd4b0dcc1b6bcafeb8e26b2b9e3e1b04b6362e8457206a7f09e7d1cb"}
Jan 04 12:32:36 crc kubenswrapper[5003]: I0104 12:32:36.881214 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kzl2p"]
Jan 04 12:32:36 crc kubenswrapper[5003]: E0104 12:32:36.882741 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e311c4b-0ab7-4985-bd09-7da18cd2b17c" containerName="collect-profiles"
Jan 04 12:32:36 crc kubenswrapper[5003]: I0104 12:32:36.882778 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e311c4b-0ab7-4985-bd09-7da18cd2b17c" containerName="collect-profiles"
Jan 04 12:32:36 crc kubenswrapper[5003]: I0104 12:32:36.883261 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e311c4b-0ab7-4985-bd09-7da18cd2b17c" containerName="collect-profiles"
Jan 04 12:32:36 crc kubenswrapper[5003]: I0104 12:32:36.904900 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kzl2p"]
Jan 04 12:32:36 crc kubenswrapper[5003]: I0104 12:32:36.905103 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kzl2p"
Need to start a new one" pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:36 crc kubenswrapper[5003]: I0104 12:32:36.922006 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-utilities\") pod \"certified-operators-kzl2p\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:36 crc kubenswrapper[5003]: I0104 12:32:36.922177 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c49mt\" (UniqueName: \"kubernetes.io/projected/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-kube-api-access-c49mt\") pod \"certified-operators-kzl2p\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:36 crc kubenswrapper[5003]: I0104 12:32:36.922336 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-catalog-content\") pod \"certified-operators-kzl2p\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:37 crc kubenswrapper[5003]: I0104 12:32:37.024795 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-catalog-content\") pod \"certified-operators-kzl2p\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:37 crc kubenswrapper[5003]: I0104 12:32:37.024893 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-utilities\") pod \"certified-operators-kzl2p\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:37 crc kubenswrapper[5003]: I0104 12:32:37.024917 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c49mt\" (UniqueName: \"kubernetes.io/projected/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-kube-api-access-c49mt\") pod \"certified-operators-kzl2p\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:37 crc kubenswrapper[5003]: I0104 12:32:37.026094 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-catalog-content\") pod \"certified-operators-kzl2p\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:37 crc kubenswrapper[5003]: I0104 12:32:37.026119 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-utilities\") pod \"certified-operators-kzl2p\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:37 crc kubenswrapper[5003]: I0104 12:32:37.042811 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c49mt\" (UniqueName: \"kubernetes.io/projected/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-kube-api-access-c49mt\") pod 
\"certified-operators-kzl2p\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:37 crc kubenswrapper[5003]: I0104 12:32:37.248703 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:37 crc kubenswrapper[5003]: I0104 12:32:37.689453 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kzl2p"] Jan 04 12:32:38 crc kubenswrapper[5003]: I0104 12:32:38.400217 5003 generic.go:334] "Generic (PLEG): container finished" podID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" containerID="f71cc2c5c7c7aecee234e56e0e26313643bfeaa6f9b1d189c57e7c43599345db" exitCode=0 Jan 04 12:32:38 crc kubenswrapper[5003]: I0104 12:32:38.400293 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kzl2p" event={"ID":"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e","Type":"ContainerDied","Data":"f71cc2c5c7c7aecee234e56e0e26313643bfeaa6f9b1d189c57e7c43599345db"} Jan 04 12:32:38 crc kubenswrapper[5003]: I0104 12:32:38.400322 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kzl2p" event={"ID":"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e","Type":"ContainerStarted","Data":"68d37a3ca780d46d49197a400f837804236a3577cbbb9cd859ed5ddecef856cb"} Jan 04 12:32:38 crc kubenswrapper[5003]: I0104 12:32:38.403547 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 12:32:40 crc kubenswrapper[5003]: I0104 12:32:40.430431 5003 generic.go:334] "Generic (PLEG): container finished" podID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" containerID="b149c750814ab11e1f2951fec8f1e0240cd50edca9978862d8c1c8834dd2bcf6" exitCode=0 Jan 04 12:32:40 crc kubenswrapper[5003]: I0104 12:32:40.430535 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kzl2p" event={"ID":"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e","Type":"ContainerDied","Data":"b149c750814ab11e1f2951fec8f1e0240cd50edca9978862d8c1c8834dd2bcf6"} Jan 04 12:32:41 crc kubenswrapper[5003]: I0104 12:32:41.440947 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kzl2p" event={"ID":"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e","Type":"ContainerStarted","Data":"779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd"} Jan 04 12:32:41 crc kubenswrapper[5003]: I0104 12:32:41.476844 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kzl2p" podStartSLOduration=2.912577694 podStartE2EDuration="5.476817522s" podCreationTimestamp="2026-01-04 12:32:36 +0000 UTC" firstStartedPulling="2026-01-04 12:32:38.40289419 +0000 UTC m=+2673.875924051" lastFinishedPulling="2026-01-04 12:32:40.967133998 +0000 UTC m=+2676.440163879" observedRunningTime="2026-01-04 12:32:41.468506934 +0000 UTC m=+2676.941536805" watchObservedRunningTime="2026-01-04 12:32:41.476817522 +0000 UTC m=+2676.949847373" Jan 04 12:32:47 crc kubenswrapper[5003]: I0104 12:32:47.249180 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:47 crc kubenswrapper[5003]: I0104 12:32:47.250990 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:47 crc kubenswrapper[5003]: I0104 12:32:47.325552 5003 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:47 crc kubenswrapper[5003]: I0104 12:32:47.551128 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:47 crc kubenswrapper[5003]: I0104 12:32:47.619088 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kzl2p"] Jan 04 12:32:49 crc kubenswrapper[5003]: I0104 12:32:49.507779 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kzl2p" podUID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" containerName="registry-server" containerID="cri-o://779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd" gracePeriod=2 Jan 04 12:32:49 crc kubenswrapper[5003]: I0104 12:32:49.969868 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.059538 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-utilities\") pod \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.059900 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c49mt\" (UniqueName: \"kubernetes.io/projected/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-kube-api-access-c49mt\") pod \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.059950 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-catalog-content\") pod \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\" (UID: \"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e\") " Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.061107 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-utilities" (OuterVolumeSpecName: "utilities") pod "f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" (UID: "f83e12ea-16c5-4d49-a8f9-f3f605c80a7e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.074373 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-kube-api-access-c49mt" (OuterVolumeSpecName: "kube-api-access-c49mt") pod "f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" (UID: "f83e12ea-16c5-4d49-a8f9-f3f605c80a7e"). InnerVolumeSpecName "kube-api-access-c49mt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.162037 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.162436 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c49mt\" (UniqueName: \"kubernetes.io/projected/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-kube-api-access-c49mt\") on node \"crc\" DevicePath \"\"" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.189882 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" (UID: "f83e12ea-16c5-4d49-a8f9-f3f605c80a7e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.264631 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.520985 5003 generic.go:334] "Generic (PLEG): container finished" podID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" containerID="779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd" exitCode=0 Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.521090 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kzl2p" event={"ID":"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e","Type":"ContainerDied","Data":"779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd"} Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.521132 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kzl2p" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.521171 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kzl2p" event={"ID":"f83e12ea-16c5-4d49-a8f9-f3f605c80a7e","Type":"ContainerDied","Data":"68d37a3ca780d46d49197a400f837804236a3577cbbb9cd859ed5ddecef856cb"} Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.521213 5003 scope.go:117] "RemoveContainer" containerID="779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.557383 5003 scope.go:117] "RemoveContainer" containerID="b149c750814ab11e1f2951fec8f1e0240cd50edca9978862d8c1c8834dd2bcf6" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.572670 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kzl2p"] Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.579286 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kzl2p"] Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.599753 5003 scope.go:117] "RemoveContainer" containerID="f71cc2c5c7c7aecee234e56e0e26313643bfeaa6f9b1d189c57e7c43599345db" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.634467 5003 scope.go:117] "RemoveContainer" containerID="779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd" Jan 04 12:32:50 crc kubenswrapper[5003]: E0104 12:32:50.642980 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd\": container with ID starting with 779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd not found: ID does not exist" containerID="779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.643049 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd"} err="failed to get container status \"779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd\": rpc error: code = NotFound desc = could not find container \"779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd\": container with ID starting with 779a0c9b21dc06dfd79786d2c578105692e865edce59ca2b3a741f8aaede35cd not found: ID does not exist" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.643084 5003 scope.go:117] "RemoveContainer" containerID="b149c750814ab11e1f2951fec8f1e0240cd50edca9978862d8c1c8834dd2bcf6" Jan 04 12:32:50 crc kubenswrapper[5003]: E0104 12:32:50.644006 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b149c750814ab11e1f2951fec8f1e0240cd50edca9978862d8c1c8834dd2bcf6\": container with ID starting with b149c750814ab11e1f2951fec8f1e0240cd50edca9978862d8c1c8834dd2bcf6 not found: ID does not exist" containerID="b149c750814ab11e1f2951fec8f1e0240cd50edca9978862d8c1c8834dd2bcf6" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.644050 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b149c750814ab11e1f2951fec8f1e0240cd50edca9978862d8c1c8834dd2bcf6"} err="failed to get container status \"b149c750814ab11e1f2951fec8f1e0240cd50edca9978862d8c1c8834dd2bcf6\": rpc error: code = NotFound desc = could not find 
container \"b149c750814ab11e1f2951fec8f1e0240cd50edca9978862d8c1c8834dd2bcf6\": container with ID starting with b149c750814ab11e1f2951fec8f1e0240cd50edca9978862d8c1c8834dd2bcf6 not found: ID does not exist" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.644066 5003 scope.go:117] "RemoveContainer" containerID="f71cc2c5c7c7aecee234e56e0e26313643bfeaa6f9b1d189c57e7c43599345db" Jan 04 12:32:50 crc kubenswrapper[5003]: E0104 12:32:50.644483 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f71cc2c5c7c7aecee234e56e0e26313643bfeaa6f9b1d189c57e7c43599345db\": container with ID starting with f71cc2c5c7c7aecee234e56e0e26313643bfeaa6f9b1d189c57e7c43599345db not found: ID does not exist" containerID="f71cc2c5c7c7aecee234e56e0e26313643bfeaa6f9b1d189c57e7c43599345db" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.644559 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f71cc2c5c7c7aecee234e56e0e26313643bfeaa6f9b1d189c57e7c43599345db"} err="failed to get container status \"f71cc2c5c7c7aecee234e56e0e26313643bfeaa6f9b1d189c57e7c43599345db\": rpc error: code = NotFound desc = could not find container \"f71cc2c5c7c7aecee234e56e0e26313643bfeaa6f9b1d189c57e7c43599345db\": container with ID starting with f71cc2c5c7c7aecee234e56e0e26313643bfeaa6f9b1d189c57e7c43599345db not found: ID does not exist" Jan 04 12:32:50 crc kubenswrapper[5003]: I0104 12:32:50.820844 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" path="/var/lib/kubelet/pods/f83e12ea-16c5-4d49-a8f9-f3f605c80a7e/volumes" Jan 04 12:33:09 crc kubenswrapper[5003]: I0104 12:33:09.418800 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:33:09 crc kubenswrapper[5003]: I0104 12:33:09.419658 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:33:39 crc kubenswrapper[5003]: I0104 12:33:39.418675 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:33:39 crc kubenswrapper[5003]: I0104 12:33:39.419383 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:34:09 crc kubenswrapper[5003]: I0104 12:34:09.418856 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 
12:34:09 crc kubenswrapper[5003]: I0104 12:34:09.419587 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:34:09 crc kubenswrapper[5003]: I0104 12:34:09.419649 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 12:34:09 crc kubenswrapper[5003]: I0104 12:34:09.420462 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eaf0e8ffbd4b0dcc1b6bcafeb8e26b2b9e3e1b04b6362e8457206a7f09e7d1cb"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:34:09 crc kubenswrapper[5003]: I0104 12:34:09.420636 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://eaf0e8ffbd4b0dcc1b6bcafeb8e26b2b9e3e1b04b6362e8457206a7f09e7d1cb" gracePeriod=600 Jan 04 12:34:10 crc kubenswrapper[5003]: I0104 12:34:10.244918 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="eaf0e8ffbd4b0dcc1b6bcafeb8e26b2b9e3e1b04b6362e8457206a7f09e7d1cb" exitCode=0 Jan 04 12:34:10 crc kubenswrapper[5003]: I0104 12:34:10.245054 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"eaf0e8ffbd4b0dcc1b6bcafeb8e26b2b9e3e1b04b6362e8457206a7f09e7d1cb"} Jan 04 12:34:10 crc kubenswrapper[5003]: I0104 12:34:10.245371 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc"} Jan 04 12:34:10 crc kubenswrapper[5003]: I0104 12:34:10.245400 5003 scope.go:117] "RemoveContainer" containerID="3d5f526caf6b6337fdcc0c4e9bf6e07de138ca901a8a09773972e393184abcb2" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.568746 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-r76gh"] Jan 04 12:35:03 crc kubenswrapper[5003]: E0104 12:35:03.570370 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" containerName="extract-utilities" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.570399 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" containerName="extract-utilities" Jan 04 12:35:03 crc kubenswrapper[5003]: E0104 12:35:03.570432 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" containerName="extract-content" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.570445 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" containerName="extract-content" Jan 04 12:35:03 crc kubenswrapper[5003]: E0104 
12:35:03.570471 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" containerName="registry-server" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.570486 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" containerName="registry-server" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.570792 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f83e12ea-16c5-4d49-a8f9-f3f605c80a7e" containerName="registry-server" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.572960 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.586748 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r76gh"] Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.624999 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xmqn\" (UniqueName: \"kubernetes.io/projected/ed44593c-d08d-4d9a-9d75-5187e5187748-kube-api-access-8xmqn\") pod \"community-operators-r76gh\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.625147 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-catalog-content\") pod \"community-operators-r76gh\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.625627 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-utilities\") pod \"community-operators-r76gh\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.726777 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xmqn\" (UniqueName: \"kubernetes.io/projected/ed44593c-d08d-4d9a-9d75-5187e5187748-kube-api-access-8xmqn\") pod \"community-operators-r76gh\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.726853 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-catalog-content\") pod \"community-operators-r76gh\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.726974 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-utilities\") pod \"community-operators-r76gh\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.727459 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-catalog-content\") pod \"community-operators-r76gh\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.727525 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-utilities\") pod \"community-operators-r76gh\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.749184 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xmqn\" (UniqueName: \"kubernetes.io/projected/ed44593c-d08d-4d9a-9d75-5187e5187748-kube-api-access-8xmqn\") pod \"community-operators-r76gh\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:03 crc kubenswrapper[5003]: I0104 12:35:03.910584 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:04 crc kubenswrapper[5003]: I0104 12:35:04.198542 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r76gh"] Jan 04 12:35:04 crc kubenswrapper[5003]: I0104 12:35:04.743478 5003 generic.go:334] "Generic (PLEG): container finished" podID="ed44593c-d08d-4d9a-9d75-5187e5187748" containerID="bad770c490cc8b524f934d3e07ed8dd914a5d626ffe47fb13a82d36cfb0d4247" exitCode=0 Jan 04 12:35:04 crc kubenswrapper[5003]: I0104 12:35:04.743551 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r76gh" event={"ID":"ed44593c-d08d-4d9a-9d75-5187e5187748","Type":"ContainerDied","Data":"bad770c490cc8b524f934d3e07ed8dd914a5d626ffe47fb13a82d36cfb0d4247"} Jan 04 12:35:04 crc kubenswrapper[5003]: I0104 12:35:04.743594 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r76gh" event={"ID":"ed44593c-d08d-4d9a-9d75-5187e5187748","Type":"ContainerStarted","Data":"5794a46e5d8b9178fe169b127356e8892cbdb722c9631e856f94b08dffe64c0d"} Jan 04 12:35:05 crc kubenswrapper[5003]: I0104 12:35:05.759340 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r76gh" event={"ID":"ed44593c-d08d-4d9a-9d75-5187e5187748","Type":"ContainerStarted","Data":"1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8"} Jan 04 12:35:06 crc kubenswrapper[5003]: I0104 12:35:06.768127 5003 generic.go:334] "Generic (PLEG): container finished" podID="ed44593c-d08d-4d9a-9d75-5187e5187748" containerID="1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8" exitCode=0 Jan 04 12:35:06 crc kubenswrapper[5003]: I0104 12:35:06.768189 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r76gh" event={"ID":"ed44593c-d08d-4d9a-9d75-5187e5187748","Type":"ContainerDied","Data":"1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8"} Jan 04 12:35:07 crc kubenswrapper[5003]: I0104 12:35:07.781460 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r76gh" event={"ID":"ed44593c-d08d-4d9a-9d75-5187e5187748","Type":"ContainerStarted","Data":"2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459"} Jan 04 12:35:07 crc kubenswrapper[5003]: I0104 12:35:07.818061 
5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-r76gh" podStartSLOduration=2.316191706 podStartE2EDuration="4.818001443s" podCreationTimestamp="2026-01-04 12:35:03 +0000 UTC" firstStartedPulling="2026-01-04 12:35:04.746338863 +0000 UTC m=+2820.219368744" lastFinishedPulling="2026-01-04 12:35:07.24814864 +0000 UTC m=+2822.721178481" observedRunningTime="2026-01-04 12:35:07.808903258 +0000 UTC m=+2823.281933109" watchObservedRunningTime="2026-01-04 12:35:07.818001443 +0000 UTC m=+2823.291031324" Jan 04 12:35:13 crc kubenswrapper[5003]: I0104 12:35:13.911314 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:13 crc kubenswrapper[5003]: I0104 12:35:13.912293 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:13 crc kubenswrapper[5003]: I0104 12:35:13.992372 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:14 crc kubenswrapper[5003]: I0104 12:35:14.931543 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:15 crc kubenswrapper[5003]: I0104 12:35:15.013335 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r76gh"] Jan 04 12:35:16 crc kubenswrapper[5003]: I0104 12:35:16.884168 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-r76gh" podUID="ed44593c-d08d-4d9a-9d75-5187e5187748" containerName="registry-server" containerID="cri-o://2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459" gracePeriod=2 Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.392092 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.593291 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-utilities\") pod \"ed44593c-d08d-4d9a-9d75-5187e5187748\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.593410 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-catalog-content\") pod \"ed44593c-d08d-4d9a-9d75-5187e5187748\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.593486 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xmqn\" (UniqueName: \"kubernetes.io/projected/ed44593c-d08d-4d9a-9d75-5187e5187748-kube-api-access-8xmqn\") pod \"ed44593c-d08d-4d9a-9d75-5187e5187748\" (UID: \"ed44593c-d08d-4d9a-9d75-5187e5187748\") " Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.595923 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-utilities" (OuterVolumeSpecName: "utilities") pod "ed44593c-d08d-4d9a-9d75-5187e5187748" (UID: "ed44593c-d08d-4d9a-9d75-5187e5187748"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.606708 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed44593c-d08d-4d9a-9d75-5187e5187748-kube-api-access-8xmqn" (OuterVolumeSpecName: "kube-api-access-8xmqn") pod "ed44593c-d08d-4d9a-9d75-5187e5187748" (UID: "ed44593c-d08d-4d9a-9d75-5187e5187748"). InnerVolumeSpecName "kube-api-access-8xmqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.667726 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed44593c-d08d-4d9a-9d75-5187e5187748" (UID: "ed44593c-d08d-4d9a-9d75-5187e5187748"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.695573 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.695604 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed44593c-d08d-4d9a-9d75-5187e5187748-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.695616 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xmqn\" (UniqueName: \"kubernetes.io/projected/ed44593c-d08d-4d9a-9d75-5187e5187748-kube-api-access-8xmqn\") on node \"crc\" DevicePath \"\"" Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.898081 5003 generic.go:334] "Generic (PLEG): container finished" podID="ed44593c-d08d-4d9a-9d75-5187e5187748" containerID="2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459" exitCode=0 Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.898159 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r76gh" event={"ID":"ed44593c-d08d-4d9a-9d75-5187e5187748","Type":"ContainerDied","Data":"2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459"} Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.898180 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-r76gh" Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.898204 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r76gh" event={"ID":"ed44593c-d08d-4d9a-9d75-5187e5187748","Type":"ContainerDied","Data":"5794a46e5d8b9178fe169b127356e8892cbdb722c9631e856f94b08dffe64c0d"} Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.898238 5003 scope.go:117] "RemoveContainer" containerID="2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459" Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.940423 5003 scope.go:117] "RemoveContainer" containerID="1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8" Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.943509 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-r76gh"] Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.954661 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-r76gh"] Jan 04 12:35:17 crc kubenswrapper[5003]: I0104 12:35:17.971642 5003 scope.go:117] "RemoveContainer" containerID="bad770c490cc8b524f934d3e07ed8dd914a5d626ffe47fb13a82d36cfb0d4247" Jan 04 12:35:18 crc kubenswrapper[5003]: I0104 12:35:18.001986 5003 scope.go:117] "RemoveContainer" containerID="2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459" Jan 04 12:35:18 crc kubenswrapper[5003]: E0104 12:35:18.002781 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459\": container with ID starting with 2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459 not found: ID does not exist" containerID="2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459" Jan 04 12:35:18 crc kubenswrapper[5003]: I0104 12:35:18.002862 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459"} err="failed to get container status \"2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459\": rpc error: code = NotFound desc = could not find container \"2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459\": container with ID starting with 2220bbddfc7f71c763812671e2289b184e09fb7b0213981e1ffa66e5b4fab459 not found: ID does not exist" Jan 04 12:35:18 crc kubenswrapper[5003]: I0104 12:35:18.002908 5003 scope.go:117] "RemoveContainer" containerID="1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8" Jan 04 12:35:18 crc kubenswrapper[5003]: E0104 12:35:18.003794 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8\": container with ID starting with 1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8 not found: ID does not exist" containerID="1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8" Jan 04 12:35:18 crc kubenswrapper[5003]: I0104 12:35:18.003880 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8"} err="failed to get container status \"1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8\": rpc error: code = NotFound desc = could not find 
container \"1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8\": container with ID starting with 1837dc647618a9a9aa41c111f6a6cc8c084cd44fbc129881a788a6798f5236e8 not found: ID does not exist" Jan 04 12:35:18 crc kubenswrapper[5003]: I0104 12:35:18.003936 5003 scope.go:117] "RemoveContainer" containerID="bad770c490cc8b524f934d3e07ed8dd914a5d626ffe47fb13a82d36cfb0d4247" Jan 04 12:35:18 crc kubenswrapper[5003]: E0104 12:35:18.004762 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bad770c490cc8b524f934d3e07ed8dd914a5d626ffe47fb13a82d36cfb0d4247\": container with ID starting with bad770c490cc8b524f934d3e07ed8dd914a5d626ffe47fb13a82d36cfb0d4247 not found: ID does not exist" containerID="bad770c490cc8b524f934d3e07ed8dd914a5d626ffe47fb13a82d36cfb0d4247" Jan 04 12:35:18 crc kubenswrapper[5003]: I0104 12:35:18.004814 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bad770c490cc8b524f934d3e07ed8dd914a5d626ffe47fb13a82d36cfb0d4247"} err="failed to get container status \"bad770c490cc8b524f934d3e07ed8dd914a5d626ffe47fb13a82d36cfb0d4247\": rpc error: code = NotFound desc = could not find container \"bad770c490cc8b524f934d3e07ed8dd914a5d626ffe47fb13a82d36cfb0d4247\": container with ID starting with bad770c490cc8b524f934d3e07ed8dd914a5d626ffe47fb13a82d36cfb0d4247 not found: ID does not exist" Jan 04 12:35:18 crc kubenswrapper[5003]: I0104 12:35:18.826641 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed44593c-d08d-4d9a-9d75-5187e5187748" path="/var/lib/kubelet/pods/ed44593c-d08d-4d9a-9d75-5187e5187748/volumes" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.306621 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9rxfp"] Jan 04 12:35:36 crc kubenswrapper[5003]: E0104 12:35:36.307619 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed44593c-d08d-4d9a-9d75-5187e5187748" containerName="extract-content" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.307635 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed44593c-d08d-4d9a-9d75-5187e5187748" containerName="extract-content" Jan 04 12:35:36 crc kubenswrapper[5003]: E0104 12:35:36.307651 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed44593c-d08d-4d9a-9d75-5187e5187748" containerName="extract-utilities" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.307658 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed44593c-d08d-4d9a-9d75-5187e5187748" containerName="extract-utilities" Jan 04 12:35:36 crc kubenswrapper[5003]: E0104 12:35:36.307690 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed44593c-d08d-4d9a-9d75-5187e5187748" containerName="registry-server" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.307698 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed44593c-d08d-4d9a-9d75-5187e5187748" containerName="registry-server" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.307859 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed44593c-d08d-4d9a-9d75-5187e5187748" containerName="registry-server" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.309548 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.320683 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9rxfp"] Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.418394 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7ws7\" (UniqueName: \"kubernetes.io/projected/284ca8da-7523-4333-b6f1-ae671cfcde6f-kube-api-access-p7ws7\") pod \"redhat-marketplace-9rxfp\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.418513 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-catalog-content\") pod \"redhat-marketplace-9rxfp\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.418546 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-utilities\") pod \"redhat-marketplace-9rxfp\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.520346 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-catalog-content\") pod \"redhat-marketplace-9rxfp\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.520575 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-utilities\") pod \"redhat-marketplace-9rxfp\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.521202 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-utilities\") pod \"redhat-marketplace-9rxfp\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.521276 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-catalog-content\") pod \"redhat-marketplace-9rxfp\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.521447 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7ws7\" (UniqueName: \"kubernetes.io/projected/284ca8da-7523-4333-b6f1-ae671cfcde6f-kube-api-access-p7ws7\") pod \"redhat-marketplace-9rxfp\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.541195 5003 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-p7ws7\" (UniqueName: \"kubernetes.io/projected/284ca8da-7523-4333-b6f1-ae671cfcde6f-kube-api-access-p7ws7\") pod \"redhat-marketplace-9rxfp\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:36 crc kubenswrapper[5003]: I0104 12:35:36.696250 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:37 crc kubenswrapper[5003]: I0104 12:35:37.128444 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9rxfp"] Jan 04 12:35:38 crc kubenswrapper[5003]: I0104 12:35:38.118558 5003 generic.go:334] "Generic (PLEG): container finished" podID="284ca8da-7523-4333-b6f1-ae671cfcde6f" containerID="7cbb6b91ac95e1f3ee1e687ab26ee79b63a6b3ad14c25a541711e1d041c245ce" exitCode=0 Jan 04 12:35:38 crc kubenswrapper[5003]: I0104 12:35:38.118676 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9rxfp" event={"ID":"284ca8da-7523-4333-b6f1-ae671cfcde6f","Type":"ContainerDied","Data":"7cbb6b91ac95e1f3ee1e687ab26ee79b63a6b3ad14c25a541711e1d041c245ce"} Jan 04 12:35:38 crc kubenswrapper[5003]: I0104 12:35:38.119108 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9rxfp" event={"ID":"284ca8da-7523-4333-b6f1-ae671cfcde6f","Type":"ContainerStarted","Data":"cc945546b2abcbb5e9e4619bbf7515ec1307572234ca0b00de96738c7da0970b"} Jan 04 12:35:39 crc kubenswrapper[5003]: I0104 12:35:39.128465 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9rxfp" event={"ID":"284ca8da-7523-4333-b6f1-ae671cfcde6f","Type":"ContainerStarted","Data":"2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2"} Jan 04 12:35:40 crc kubenswrapper[5003]: I0104 12:35:40.140906 5003 generic.go:334] "Generic (PLEG): container finished" podID="284ca8da-7523-4333-b6f1-ae671cfcde6f" containerID="2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2" exitCode=0 Jan 04 12:35:40 crc kubenswrapper[5003]: I0104 12:35:40.141120 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9rxfp" event={"ID":"284ca8da-7523-4333-b6f1-ae671cfcde6f","Type":"ContainerDied","Data":"2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2"} Jan 04 12:35:41 crc kubenswrapper[5003]: I0104 12:35:41.152357 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9rxfp" event={"ID":"284ca8da-7523-4333-b6f1-ae671cfcde6f","Type":"ContainerStarted","Data":"86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351"} Jan 04 12:35:41 crc kubenswrapper[5003]: I0104 12:35:41.185438 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9rxfp" podStartSLOduration=2.740005186 podStartE2EDuration="5.185407966s" podCreationTimestamp="2026-01-04 12:35:36 +0000 UTC" firstStartedPulling="2026-01-04 12:35:38.121163895 +0000 UTC m=+2853.594193776" lastFinishedPulling="2026-01-04 12:35:40.566566715 +0000 UTC m=+2856.039596556" observedRunningTime="2026-01-04 12:35:41.17812093 +0000 UTC m=+2856.651150771" watchObservedRunningTime="2026-01-04 12:35:41.185407966 +0000 UTC m=+2856.658437847" Jan 04 12:35:46 crc kubenswrapper[5003]: I0104 12:35:46.696877 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:46 crc kubenswrapper[5003]: I0104 12:35:46.697291 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:46 crc kubenswrapper[5003]: I0104 12:35:46.753088 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:47 crc kubenswrapper[5003]: I0104 12:35:47.275313 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:47 crc kubenswrapper[5003]: I0104 12:35:47.337699 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9rxfp"] Jan 04 12:35:49 crc kubenswrapper[5003]: I0104 12:35:49.224636 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9rxfp" podUID="284ca8da-7523-4333-b6f1-ae671cfcde6f" containerName="registry-server" containerID="cri-o://86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351" gracePeriod=2 Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.236243 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.239319 5003 generic.go:334] "Generic (PLEG): container finished" podID="284ca8da-7523-4333-b6f1-ae671cfcde6f" containerID="86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351" exitCode=0 Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.239386 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9rxfp" event={"ID":"284ca8da-7523-4333-b6f1-ae671cfcde6f","Type":"ContainerDied","Data":"86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351"} Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.239481 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9rxfp" event={"ID":"284ca8da-7523-4333-b6f1-ae671cfcde6f","Type":"ContainerDied","Data":"cc945546b2abcbb5e9e4619bbf7515ec1307572234ca0b00de96738c7da0970b"} Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.239526 5003 scope.go:117] "RemoveContainer" containerID="86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.295889 5003 scope.go:117] "RemoveContainer" containerID="2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.328656 5003 scope.go:117] "RemoveContainer" containerID="7cbb6b91ac95e1f3ee1e687ab26ee79b63a6b3ad14c25a541711e1d041c245ce" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.354039 5003 scope.go:117] "RemoveContainer" containerID="86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351" Jan 04 12:35:50 crc kubenswrapper[5003]: E0104 12:35:50.354813 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351\": container with ID starting with 86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351 not found: ID does not exist" containerID="86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.354875 5003 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351"} err="failed to get container status \"86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351\": rpc error: code = NotFound desc = could not find container \"86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351\": container with ID starting with 86fc46383858b24da4c44904f566226ae3e1cf5b478e644d79159a15d94e3351 not found: ID does not exist" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.354918 5003 scope.go:117] "RemoveContainer" containerID="2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2" Jan 04 12:35:50 crc kubenswrapper[5003]: E0104 12:35:50.355449 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2\": container with ID starting with 2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2 not found: ID does not exist" containerID="2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.355491 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2"} err="failed to get container status \"2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2\": rpc error: code = NotFound desc = could not find container \"2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2\": container with ID starting with 2a3082db9ad496f900900f0f569b1da281c41ca38daf8fb2fedac08d4cae72c2 not found: ID does not exist" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.355520 5003 scope.go:117] "RemoveContainer" containerID="7cbb6b91ac95e1f3ee1e687ab26ee79b63a6b3ad14c25a541711e1d041c245ce" Jan 04 12:35:50 crc kubenswrapper[5003]: E0104 12:35:50.356287 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cbb6b91ac95e1f3ee1e687ab26ee79b63a6b3ad14c25a541711e1d041c245ce\": container with ID starting with 7cbb6b91ac95e1f3ee1e687ab26ee79b63a6b3ad14c25a541711e1d041c245ce not found: ID does not exist" containerID="7cbb6b91ac95e1f3ee1e687ab26ee79b63a6b3ad14c25a541711e1d041c245ce" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.356340 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cbb6b91ac95e1f3ee1e687ab26ee79b63a6b3ad14c25a541711e1d041c245ce"} err="failed to get container status \"7cbb6b91ac95e1f3ee1e687ab26ee79b63a6b3ad14c25a541711e1d041c245ce\": rpc error: code = NotFound desc = could not find container \"7cbb6b91ac95e1f3ee1e687ab26ee79b63a6b3ad14c25a541711e1d041c245ce\": container with ID starting with 7cbb6b91ac95e1f3ee1e687ab26ee79b63a6b3ad14c25a541711e1d041c245ce not found: ID does not exist" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.361656 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-catalog-content\") pod \"284ca8da-7523-4333-b6f1-ae671cfcde6f\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.361727 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7ws7\" (UniqueName: 
\"kubernetes.io/projected/284ca8da-7523-4333-b6f1-ae671cfcde6f-kube-api-access-p7ws7\") pod \"284ca8da-7523-4333-b6f1-ae671cfcde6f\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.361865 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-utilities\") pod \"284ca8da-7523-4333-b6f1-ae671cfcde6f\" (UID: \"284ca8da-7523-4333-b6f1-ae671cfcde6f\") " Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.363512 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-utilities" (OuterVolumeSpecName: "utilities") pod "284ca8da-7523-4333-b6f1-ae671cfcde6f" (UID: "284ca8da-7523-4333-b6f1-ae671cfcde6f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.374973 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/284ca8da-7523-4333-b6f1-ae671cfcde6f-kube-api-access-p7ws7" (OuterVolumeSpecName: "kube-api-access-p7ws7") pod "284ca8da-7523-4333-b6f1-ae671cfcde6f" (UID: "284ca8da-7523-4333-b6f1-ae671cfcde6f"). InnerVolumeSpecName "kube-api-access-p7ws7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.392095 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "284ca8da-7523-4333-b6f1-ae671cfcde6f" (UID: "284ca8da-7523-4333-b6f1-ae671cfcde6f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.464345 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.464394 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/284ca8da-7523-4333-b6f1-ae671cfcde6f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:35:50 crc kubenswrapper[5003]: I0104 12:35:50.464414 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7ws7\" (UniqueName: \"kubernetes.io/projected/284ca8da-7523-4333-b6f1-ae671cfcde6f-kube-api-access-p7ws7\") on node \"crc\" DevicePath \"\"" Jan 04 12:35:51 crc kubenswrapper[5003]: I0104 12:35:51.249500 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9rxfp" Jan 04 12:35:51 crc kubenswrapper[5003]: I0104 12:35:51.281245 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9rxfp"] Jan 04 12:35:51 crc kubenswrapper[5003]: I0104 12:35:51.288128 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9rxfp"] Jan 04 12:35:52 crc kubenswrapper[5003]: I0104 12:35:52.817988 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="284ca8da-7523-4333-b6f1-ae671cfcde6f" path="/var/lib/kubelet/pods/284ca8da-7523-4333-b6f1-ae671cfcde6f/volumes" Jan 04 12:36:09 crc kubenswrapper[5003]: I0104 12:36:09.419195 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:36:09 crc kubenswrapper[5003]: I0104 12:36:09.419797 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.476916 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vnzj9"] Jan 04 12:36:29 crc kubenswrapper[5003]: E0104 12:36:29.478128 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284ca8da-7523-4333-b6f1-ae671cfcde6f" containerName="extract-content" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.478149 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="284ca8da-7523-4333-b6f1-ae671cfcde6f" containerName="extract-content" Jan 04 12:36:29 crc kubenswrapper[5003]: E0104 12:36:29.478172 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284ca8da-7523-4333-b6f1-ae671cfcde6f" containerName="registry-server" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.478183 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="284ca8da-7523-4333-b6f1-ae671cfcde6f" containerName="registry-server" Jan 04 12:36:29 crc kubenswrapper[5003]: E0104 12:36:29.478203 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="284ca8da-7523-4333-b6f1-ae671cfcde6f" containerName="extract-utilities" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.478214 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="284ca8da-7523-4333-b6f1-ae671cfcde6f" containerName="extract-utilities" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.478452 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="284ca8da-7523-4333-b6f1-ae671cfcde6f" containerName="registry-server" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.480206 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.489901 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vnzj9"] Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.570878 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k68g7\" (UniqueName: \"kubernetes.io/projected/765f90a8-41c1-4188-ae8a-ff526a56ad35-kube-api-access-k68g7\") pod \"redhat-operators-vnzj9\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.570955 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-utilities\") pod \"redhat-operators-vnzj9\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.571010 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-catalog-content\") pod \"redhat-operators-vnzj9\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.672569 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k68g7\" (UniqueName: \"kubernetes.io/projected/765f90a8-41c1-4188-ae8a-ff526a56ad35-kube-api-access-k68g7\") pod \"redhat-operators-vnzj9\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.672643 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-utilities\") pod \"redhat-operators-vnzj9\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.672697 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-catalog-content\") pod \"redhat-operators-vnzj9\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.673171 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-utilities\") pod \"redhat-operators-vnzj9\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.673281 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-catalog-content\") pod \"redhat-operators-vnzj9\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.699004 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-k68g7\" (UniqueName: \"kubernetes.io/projected/765f90a8-41c1-4188-ae8a-ff526a56ad35-kube-api-access-k68g7\") pod \"redhat-operators-vnzj9\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:29 crc kubenswrapper[5003]: I0104 12:36:29.852338 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:30 crc kubenswrapper[5003]: I0104 12:36:30.312623 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vnzj9"] Jan 04 12:36:30 crc kubenswrapper[5003]: I0104 12:36:30.595159 5003 generic.go:334] "Generic (PLEG): container finished" podID="765f90a8-41c1-4188-ae8a-ff526a56ad35" containerID="e26f94ddb74fc28a497f7b09349bde19a571cb34f94f905c1b09b05fa0969fad" exitCode=0 Jan 04 12:36:30 crc kubenswrapper[5003]: I0104 12:36:30.595223 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vnzj9" event={"ID":"765f90a8-41c1-4188-ae8a-ff526a56ad35","Type":"ContainerDied","Data":"e26f94ddb74fc28a497f7b09349bde19a571cb34f94f905c1b09b05fa0969fad"} Jan 04 12:36:30 crc kubenswrapper[5003]: I0104 12:36:30.595261 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vnzj9" event={"ID":"765f90a8-41c1-4188-ae8a-ff526a56ad35","Type":"ContainerStarted","Data":"9af771826d7f91d4fd2f3330cdb4c8b2823d8d313fbc7cf1b179fc8fe58ea13f"} Jan 04 12:36:31 crc kubenswrapper[5003]: I0104 12:36:31.603586 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vnzj9" event={"ID":"765f90a8-41c1-4188-ae8a-ff526a56ad35","Type":"ContainerStarted","Data":"f26682b24d66471a97c890d89d20202c9b2c133e8d8472df8363aedb9ff33a24"} Jan 04 12:36:32 crc kubenswrapper[5003]: I0104 12:36:32.613880 5003 generic.go:334] "Generic (PLEG): container finished" podID="765f90a8-41c1-4188-ae8a-ff526a56ad35" containerID="f26682b24d66471a97c890d89d20202c9b2c133e8d8472df8363aedb9ff33a24" exitCode=0 Jan 04 12:36:32 crc kubenswrapper[5003]: I0104 12:36:32.613936 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vnzj9" event={"ID":"765f90a8-41c1-4188-ae8a-ff526a56ad35","Type":"ContainerDied","Data":"f26682b24d66471a97c890d89d20202c9b2c133e8d8472df8363aedb9ff33a24"} Jan 04 12:36:33 crc kubenswrapper[5003]: I0104 12:36:33.623860 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vnzj9" event={"ID":"765f90a8-41c1-4188-ae8a-ff526a56ad35","Type":"ContainerStarted","Data":"1a276611ed03b617206ba5146a2200a57c15004c2abff5cfaa53e7da77ee0be0"} Jan 04 12:36:33 crc kubenswrapper[5003]: I0104 12:36:33.653838 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vnzj9" podStartSLOduration=2.225626811 podStartE2EDuration="4.653806497s" podCreationTimestamp="2026-01-04 12:36:29 +0000 UTC" firstStartedPulling="2026-01-04 12:36:30.596677728 +0000 UTC m=+2906.069707569" lastFinishedPulling="2026-01-04 12:36:33.024857414 +0000 UTC m=+2908.497887255" observedRunningTime="2026-01-04 12:36:33.648874715 +0000 UTC m=+2909.121904576" watchObservedRunningTime="2026-01-04 12:36:33.653806497 +0000 UTC m=+2909.126836338" Jan 04 12:36:39 crc kubenswrapper[5003]: I0104 12:36:39.419068 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:36:39 crc kubenswrapper[5003]: I0104 12:36:39.420194 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:36:39 crc kubenswrapper[5003]: I0104 12:36:39.853285 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:39 crc kubenswrapper[5003]: I0104 12:36:39.853355 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:39 crc kubenswrapper[5003]: I0104 12:36:39.901175 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:40 crc kubenswrapper[5003]: I0104 12:36:40.760318 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:40 crc kubenswrapper[5003]: I0104 12:36:40.831411 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vnzj9"] Jan 04 12:36:42 crc kubenswrapper[5003]: I0104 12:36:42.705937 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vnzj9" podUID="765f90a8-41c1-4188-ae8a-ff526a56ad35" containerName="registry-server" containerID="cri-o://1a276611ed03b617206ba5146a2200a57c15004c2abff5cfaa53e7da77ee0be0" gracePeriod=2 Jan 04 12:36:44 crc kubenswrapper[5003]: I0104 12:36:44.727426 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vnzj9" event={"ID":"765f90a8-41c1-4188-ae8a-ff526a56ad35","Type":"ContainerDied","Data":"1a276611ed03b617206ba5146a2200a57c15004c2abff5cfaa53e7da77ee0be0"} Jan 04 12:36:44 crc kubenswrapper[5003]: I0104 12:36:44.727360 5003 generic.go:334] "Generic (PLEG): container finished" podID="765f90a8-41c1-4188-ae8a-ff526a56ad35" containerID="1a276611ed03b617206ba5146a2200a57c15004c2abff5cfaa53e7da77ee0be0" exitCode=0 Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.014169 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.048091 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-catalog-content\") pod \"765f90a8-41c1-4188-ae8a-ff526a56ad35\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.048241 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k68g7\" (UniqueName: \"kubernetes.io/projected/765f90a8-41c1-4188-ae8a-ff526a56ad35-kube-api-access-k68g7\") pod \"765f90a8-41c1-4188-ae8a-ff526a56ad35\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.048302 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-utilities\") pod \"765f90a8-41c1-4188-ae8a-ff526a56ad35\" (UID: \"765f90a8-41c1-4188-ae8a-ff526a56ad35\") " Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.049178 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-utilities" (OuterVolumeSpecName: "utilities") pod "765f90a8-41c1-4188-ae8a-ff526a56ad35" (UID: "765f90a8-41c1-4188-ae8a-ff526a56ad35"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.049557 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.065043 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/765f90a8-41c1-4188-ae8a-ff526a56ad35-kube-api-access-k68g7" (OuterVolumeSpecName: "kube-api-access-k68g7") pod "765f90a8-41c1-4188-ae8a-ff526a56ad35" (UID: "765f90a8-41c1-4188-ae8a-ff526a56ad35"). InnerVolumeSpecName "kube-api-access-k68g7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.151381 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k68g7\" (UniqueName: \"kubernetes.io/projected/765f90a8-41c1-4188-ae8a-ff526a56ad35-kube-api-access-k68g7\") on node \"crc\" DevicePath \"\"" Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.180621 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "765f90a8-41c1-4188-ae8a-ff526a56ad35" (UID: "765f90a8-41c1-4188-ae8a-ff526a56ad35"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.253123 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/765f90a8-41c1-4188-ae8a-ff526a56ad35-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.742715 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vnzj9" event={"ID":"765f90a8-41c1-4188-ae8a-ff526a56ad35","Type":"ContainerDied","Data":"9af771826d7f91d4fd2f3330cdb4c8b2823d8d313fbc7cf1b179fc8fe58ea13f"} Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.742770 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vnzj9" Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.742802 5003 scope.go:117] "RemoveContainer" containerID="1a276611ed03b617206ba5146a2200a57c15004c2abff5cfaa53e7da77ee0be0" Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.773721 5003 scope.go:117] "RemoveContainer" containerID="f26682b24d66471a97c890d89d20202c9b2c133e8d8472df8363aedb9ff33a24" Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.797507 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vnzj9"] Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.800725 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vnzj9"] Jan 04 12:36:45 crc kubenswrapper[5003]: I0104 12:36:45.828791 5003 scope.go:117] "RemoveContainer" containerID="e26f94ddb74fc28a497f7b09349bde19a571cb34f94f905c1b09b05fa0969fad" Jan 04 12:36:46 crc kubenswrapper[5003]: I0104 12:36:46.824728 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="765f90a8-41c1-4188-ae8a-ff526a56ad35" path="/var/lib/kubelet/pods/765f90a8-41c1-4188-ae8a-ff526a56ad35/volumes" Jan 04 12:37:09 crc kubenswrapper[5003]: I0104 12:37:09.418705 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:37:09 crc kubenswrapper[5003]: I0104 12:37:09.419250 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:37:09 crc kubenswrapper[5003]: I0104 12:37:09.419304 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 12:37:09 crc kubenswrapper[5003]: I0104 12:37:09.419977 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:37:09 crc kubenswrapper[5003]: I0104 12:37:09.420046 5003 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" gracePeriod=600 Jan 04 12:37:09 crc kubenswrapper[5003]: E0104 12:37:09.540609 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:37:09 crc kubenswrapper[5003]: I0104 12:37:09.961922 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" exitCode=0 Jan 04 12:37:09 crc kubenswrapper[5003]: I0104 12:37:09.961973 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc"} Jan 04 12:37:09 crc kubenswrapper[5003]: I0104 12:37:09.962029 5003 scope.go:117] "RemoveContainer" containerID="eaf0e8ffbd4b0dcc1b6bcafeb8e26b2b9e3e1b04b6362e8457206a7f09e7d1cb" Jan 04 12:37:09 crc kubenswrapper[5003]: I0104 12:37:09.962636 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:37:09 crc kubenswrapper[5003]: E0104 12:37:09.962893 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:37:24 crc kubenswrapper[5003]: I0104 12:37:24.812357 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:37:24 crc kubenswrapper[5003]: E0104 12:37:24.813091 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:37:39 crc kubenswrapper[5003]: I0104 12:37:39.806676 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:37:39 crc kubenswrapper[5003]: E0104 12:37:39.807705 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:37:54 crc 
kubenswrapper[5003]: I0104 12:37:54.810318 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:37:54 crc kubenswrapper[5003]: E0104 12:37:54.811318 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:38:08 crc kubenswrapper[5003]: I0104 12:38:08.808143 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:38:08 crc kubenswrapper[5003]: E0104 12:38:08.813339 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:38:22 crc kubenswrapper[5003]: I0104 12:38:22.807274 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:38:22 crc kubenswrapper[5003]: E0104 12:38:22.808120 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:38:36 crc kubenswrapper[5003]: I0104 12:38:36.808098 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:38:36 crc kubenswrapper[5003]: E0104 12:38:36.809151 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:38:49 crc kubenswrapper[5003]: I0104 12:38:49.818260 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:38:49 crc kubenswrapper[5003]: E0104 12:38:49.820072 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:39:03 crc kubenswrapper[5003]: I0104 12:39:03.806591 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:39:03 crc 
kubenswrapper[5003]: E0104 12:39:03.807804 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:39:14 crc kubenswrapper[5003]: I0104 12:39:14.811567 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:39:14 crc kubenswrapper[5003]: E0104 12:39:14.812667 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:39:26 crc kubenswrapper[5003]: I0104 12:39:26.807392 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:39:26 crc kubenswrapper[5003]: E0104 12:39:26.809271 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:39:37 crc kubenswrapper[5003]: I0104 12:39:37.806553 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:39:37 crc kubenswrapper[5003]: E0104 12:39:37.807587 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:39:50 crc kubenswrapper[5003]: I0104 12:39:50.807334 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:39:50 crc kubenswrapper[5003]: E0104 12:39:50.808472 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:40:05 crc kubenswrapper[5003]: I0104 12:40:05.807525 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:40:05 crc kubenswrapper[5003]: E0104 12:40:05.808752 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:40:18 crc kubenswrapper[5003]: I0104 12:40:18.807813 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:40:18 crc kubenswrapper[5003]: E0104 12:40:18.810544 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:40:32 crc kubenswrapper[5003]: I0104 12:40:32.807028 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:40:32 crc kubenswrapper[5003]: E0104 12:40:32.808710 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:40:46 crc kubenswrapper[5003]: I0104 12:40:46.806758 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:40:46 crc kubenswrapper[5003]: E0104 12:40:46.807519 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:40:57 crc kubenswrapper[5003]: I0104 12:40:57.808393 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:40:57 crc kubenswrapper[5003]: E0104 12:40:57.809686 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:41:12 crc kubenswrapper[5003]: I0104 12:41:12.808189 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:41:12 crc kubenswrapper[5003]: E0104 12:41:12.809524 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:41:27 crc kubenswrapper[5003]: I0104 12:41:27.806979 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:41:27 crc kubenswrapper[5003]: E0104 12:41:27.808051 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:41:41 crc kubenswrapper[5003]: I0104 12:41:41.806639 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:41:41 crc kubenswrapper[5003]: E0104 12:41:41.807424 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:41:55 crc kubenswrapper[5003]: I0104 12:41:55.806598 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:41:55 crc kubenswrapper[5003]: E0104 12:41:55.807719 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:42:09 crc kubenswrapper[5003]: I0104 12:42:09.806772 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:42:10 crc kubenswrapper[5003]: I0104 12:42:10.665982 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"75b2468fe392af0f5ba5e903625df0d783eabdb12a668e937157d68bb525adb8"} Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.390830 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2mt9s"] Jan 04 12:43:05 crc kubenswrapper[5003]: E0104 12:43:05.392280 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765f90a8-41c1-4188-ae8a-ff526a56ad35" containerName="extract-utilities" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.392297 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="765f90a8-41c1-4188-ae8a-ff526a56ad35" containerName="extract-utilities" Jan 04 12:43:05 crc kubenswrapper[5003]: E0104 12:43:05.392321 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765f90a8-41c1-4188-ae8a-ff526a56ad35" containerName="registry-server" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.392354 5003 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="765f90a8-41c1-4188-ae8a-ff526a56ad35" containerName="registry-server" Jan 04 12:43:05 crc kubenswrapper[5003]: E0104 12:43:05.392368 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765f90a8-41c1-4188-ae8a-ff526a56ad35" containerName="extract-content" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.392376 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="765f90a8-41c1-4188-ae8a-ff526a56ad35" containerName="extract-content" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.392582 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="765f90a8-41c1-4188-ae8a-ff526a56ad35" containerName="registry-server" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.393792 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.406429 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2mt9s"] Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.573741 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-utilities\") pod \"certified-operators-2mt9s\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.573816 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4rmg\" (UniqueName: \"kubernetes.io/projected/4579abbe-c323-450b-afb6-a76b4102f805-kube-api-access-h4rmg\") pod \"certified-operators-2mt9s\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.573860 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-catalog-content\") pod \"certified-operators-2mt9s\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.675982 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4rmg\" (UniqueName: \"kubernetes.io/projected/4579abbe-c323-450b-afb6-a76b4102f805-kube-api-access-h4rmg\") pod \"certified-operators-2mt9s\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.676058 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-catalog-content\") pod \"certified-operators-2mt9s\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.676185 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-utilities\") pod \"certified-operators-2mt9s\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.676808 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-utilities\") pod \"certified-operators-2mt9s\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.676964 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-catalog-content\") pod \"certified-operators-2mt9s\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.698426 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4rmg\" (UniqueName: \"kubernetes.io/projected/4579abbe-c323-450b-afb6-a76b4102f805-kube-api-access-h4rmg\") pod \"certified-operators-2mt9s\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:05 crc kubenswrapper[5003]: I0104 12:43:05.720833 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:06 crc kubenswrapper[5003]: I0104 12:43:06.259891 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2mt9s"] Jan 04 12:43:07 crc kubenswrapper[5003]: I0104 12:43:07.171908 5003 generic.go:334] "Generic (PLEG): container finished" podID="4579abbe-c323-450b-afb6-a76b4102f805" containerID="b72c2100017c07fe790434509a92e2660bfec377ab571a004ff64afa420b2771" exitCode=0 Jan 04 12:43:07 crc kubenswrapper[5003]: I0104 12:43:07.171988 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mt9s" event={"ID":"4579abbe-c323-450b-afb6-a76b4102f805","Type":"ContainerDied","Data":"b72c2100017c07fe790434509a92e2660bfec377ab571a004ff64afa420b2771"} Jan 04 12:43:07 crc kubenswrapper[5003]: I0104 12:43:07.172072 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mt9s" event={"ID":"4579abbe-c323-450b-afb6-a76b4102f805","Type":"ContainerStarted","Data":"5834c2490474743fabb9c5dfb47dabcd96a0e3dc18dae454e0ff490944ada71f"} Jan 04 12:43:07 crc kubenswrapper[5003]: I0104 12:43:07.175454 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 12:43:08 crc kubenswrapper[5003]: I0104 12:43:08.184295 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mt9s" event={"ID":"4579abbe-c323-450b-afb6-a76b4102f805","Type":"ContainerStarted","Data":"ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a"} Jan 04 12:43:09 crc kubenswrapper[5003]: I0104 12:43:09.209462 5003 generic.go:334] "Generic (PLEG): container finished" podID="4579abbe-c323-450b-afb6-a76b4102f805" containerID="ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a" exitCode=0 Jan 04 12:43:09 crc kubenswrapper[5003]: I0104 12:43:09.211512 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mt9s" event={"ID":"4579abbe-c323-450b-afb6-a76b4102f805","Type":"ContainerDied","Data":"ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a"} Jan 04 12:43:10 crc kubenswrapper[5003]: I0104 12:43:10.223419 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-2mt9s" event={"ID":"4579abbe-c323-450b-afb6-a76b4102f805","Type":"ContainerStarted","Data":"439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1"} Jan 04 12:43:10 crc kubenswrapper[5003]: I0104 12:43:10.252006 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2mt9s" podStartSLOduration=2.76587482 podStartE2EDuration="5.251977377s" podCreationTimestamp="2026-01-04 12:43:05 +0000 UTC" firstStartedPulling="2026-01-04 12:43:07.175078326 +0000 UTC m=+3302.648108187" lastFinishedPulling="2026-01-04 12:43:09.661180893 +0000 UTC m=+3305.134210744" observedRunningTime="2026-01-04 12:43:10.24697937 +0000 UTC m=+3305.720009241" watchObservedRunningTime="2026-01-04 12:43:10.251977377 +0000 UTC m=+3305.725007228" Jan 04 12:43:15 crc kubenswrapper[5003]: I0104 12:43:15.722489 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:15 crc kubenswrapper[5003]: I0104 12:43:15.723212 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:15 crc kubenswrapper[5003]: I0104 12:43:15.793656 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:16 crc kubenswrapper[5003]: I0104 12:43:16.314589 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:16 crc kubenswrapper[5003]: I0104 12:43:16.366222 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2mt9s"] Jan 04 12:43:18 crc kubenswrapper[5003]: I0104 12:43:18.290487 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2mt9s" podUID="4579abbe-c323-450b-afb6-a76b4102f805" containerName="registry-server" containerID="cri-o://439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1" gracePeriod=2 Jan 04 12:43:18 crc kubenswrapper[5003]: I0104 12:43:18.738146 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:18 crc kubenswrapper[5003]: I0104 12:43:18.812389 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4rmg\" (UniqueName: \"kubernetes.io/projected/4579abbe-c323-450b-afb6-a76b4102f805-kube-api-access-h4rmg\") pod \"4579abbe-c323-450b-afb6-a76b4102f805\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " Jan 04 12:43:18 crc kubenswrapper[5003]: I0104 12:43:18.812547 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-utilities\") pod \"4579abbe-c323-450b-afb6-a76b4102f805\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " Jan 04 12:43:18 crc kubenswrapper[5003]: I0104 12:43:18.812585 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-catalog-content\") pod \"4579abbe-c323-450b-afb6-a76b4102f805\" (UID: \"4579abbe-c323-450b-afb6-a76b4102f805\") " Jan 04 12:43:18 crc kubenswrapper[5003]: I0104 12:43:18.813539 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-utilities" (OuterVolumeSpecName: "utilities") pod "4579abbe-c323-450b-afb6-a76b4102f805" (UID: "4579abbe-c323-450b-afb6-a76b4102f805"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:43:18 crc kubenswrapper[5003]: I0104 12:43:18.819107 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4579abbe-c323-450b-afb6-a76b4102f805-kube-api-access-h4rmg" (OuterVolumeSpecName: "kube-api-access-h4rmg") pod "4579abbe-c323-450b-afb6-a76b4102f805" (UID: "4579abbe-c323-450b-afb6-a76b4102f805"). InnerVolumeSpecName "kube-api-access-h4rmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:43:18 crc kubenswrapper[5003]: I0104 12:43:18.915338 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:43:18 crc kubenswrapper[5003]: I0104 12:43:18.915403 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4rmg\" (UniqueName: \"kubernetes.io/projected/4579abbe-c323-450b-afb6-a76b4102f805-kube-api-access-h4rmg\") on node \"crc\" DevicePath \"\"" Jan 04 12:43:18 crc kubenswrapper[5003]: I0104 12:43:18.930365 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4579abbe-c323-450b-afb6-a76b4102f805" (UID: "4579abbe-c323-450b-afb6-a76b4102f805"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.017370 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4579abbe-c323-450b-afb6-a76b4102f805-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.303850 5003 generic.go:334] "Generic (PLEG): container finished" podID="4579abbe-c323-450b-afb6-a76b4102f805" containerID="439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1" exitCode=0 Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.303903 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2mt9s" Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.303934 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mt9s" event={"ID":"4579abbe-c323-450b-afb6-a76b4102f805","Type":"ContainerDied","Data":"439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1"} Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.304057 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2mt9s" event={"ID":"4579abbe-c323-450b-afb6-a76b4102f805","Type":"ContainerDied","Data":"5834c2490474743fabb9c5dfb47dabcd96a0e3dc18dae454e0ff490944ada71f"} Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.304114 5003 scope.go:117] "RemoveContainer" containerID="439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1" Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.332481 5003 scope.go:117] "RemoveContainer" containerID="ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a" Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.350765 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2mt9s"] Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.356993 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2mt9s"] Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.372644 5003 scope.go:117] "RemoveContainer" containerID="b72c2100017c07fe790434509a92e2660bfec377ab571a004ff64afa420b2771" Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.420862 5003 scope.go:117] "RemoveContainer" containerID="439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1" Jan 04 12:43:19 crc kubenswrapper[5003]: E0104 12:43:19.422039 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1\": container with ID starting with 439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1 not found: ID does not exist" containerID="439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1" Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.422106 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1"} err="failed to get container status \"439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1\": rpc error: code = NotFound desc = could not find container \"439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1\": container with ID starting with 439f8c68176928ef4a2f0367619f7c788facf3132fa189086982e81b6f86c4b1 not found: ID does not exist" Jan 04 
12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.422156 5003 scope.go:117] "RemoveContainer" containerID="ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a" Jan 04 12:43:19 crc kubenswrapper[5003]: E0104 12:43:19.424247 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a\": container with ID starting with ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a not found: ID does not exist" containerID="ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a" Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.424322 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a"} err="failed to get container status \"ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a\": rpc error: code = NotFound desc = could not find container \"ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a\": container with ID starting with ff64c2598bc9543d4305c6b26bc74e301c2a8f761b0cfa274f785e487e92417a not found: ID does not exist" Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.424364 5003 scope.go:117] "RemoveContainer" containerID="b72c2100017c07fe790434509a92e2660bfec377ab571a004ff64afa420b2771" Jan 04 12:43:19 crc kubenswrapper[5003]: E0104 12:43:19.424870 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b72c2100017c07fe790434509a92e2660bfec377ab571a004ff64afa420b2771\": container with ID starting with b72c2100017c07fe790434509a92e2660bfec377ab571a004ff64afa420b2771 not found: ID does not exist" containerID="b72c2100017c07fe790434509a92e2660bfec377ab571a004ff64afa420b2771" Jan 04 12:43:19 crc kubenswrapper[5003]: I0104 12:43:19.424964 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b72c2100017c07fe790434509a92e2660bfec377ab571a004ff64afa420b2771"} err="failed to get container status \"b72c2100017c07fe790434509a92e2660bfec377ab571a004ff64afa420b2771\": rpc error: code = NotFound desc = could not find container \"b72c2100017c07fe790434509a92e2660bfec377ab571a004ff64afa420b2771\": container with ID starting with b72c2100017c07fe790434509a92e2660bfec377ab571a004ff64afa420b2771 not found: ID does not exist" Jan 04 12:43:20 crc kubenswrapper[5003]: I0104 12:43:20.820529 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4579abbe-c323-450b-afb6-a76b4102f805" path="/var/lib/kubelet/pods/4579abbe-c323-450b-afb6-a76b4102f805/volumes" Jan 04 12:44:09 crc kubenswrapper[5003]: I0104 12:44:09.418739 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:44:09 crc kubenswrapper[5003]: I0104 12:44:09.419348 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:44:39 crc kubenswrapper[5003]: I0104 12:44:39.418789 5003 patch_prober.go:28] 
Jan 04 12:44:09 crc kubenswrapper[5003]: I0104 12:44:09.418739 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:44:09 crc kubenswrapper[5003]: I0104 12:44:09.419348 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 12:44:39 crc kubenswrapper[5003]: I0104 12:44:39.418789 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 12:44:39 crc kubenswrapper[5003]: I0104 12:44:39.419746 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
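
[editor's note] These repeated failures are the kubelet probing machine-config-daemon's health endpoint and getting connection refused. The check is behaviorally equivalent to the sketch below (the kubelet's real prober additionally honors the probe's configured timeout, headers, and failure thresholds):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        client := &http.Client{Timeout: 1 * time.Second}
        // The endpoint from the probe output above.
        resp, err := client.Get("http://127.0.0.1:8798/health")
        if err != nil {
            fmt.Println("probe failure:", err) // e.g. "connect: connection refused"
            return
        }
        defer resp.Body.Close()
        // The kubelet counts any 2xx/3xx status as probe success.
        if resp.StatusCode >= 200 && resp.StatusCode < 400 {
            fmt.Println("probe success:", resp.Status)
        } else {
            fmt.Println("probe failure:", resp.Status)
        }
    }
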
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.148457 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"]
Jan 04 12:45:00 crc kubenswrapper[5003]: E0104 12:45:00.149451 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4579abbe-c323-450b-afb6-a76b4102f805" containerName="extract-content"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.149468 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4579abbe-c323-450b-afb6-a76b4102f805" containerName="extract-content"
Jan 04 12:45:00 crc kubenswrapper[5003]: E0104 12:45:00.149495 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4579abbe-c323-450b-afb6-a76b4102f805" containerName="registry-server"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.149508 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4579abbe-c323-450b-afb6-a76b4102f805" containerName="registry-server"
Jan 04 12:45:00 crc kubenswrapper[5003]: E0104 12:45:00.149521 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4579abbe-c323-450b-afb6-a76b4102f805" containerName="extract-utilities"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.149530 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4579abbe-c323-450b-afb6-a76b4102f805" containerName="extract-utilities"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.149697 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4579abbe-c323-450b-afb6-a76b4102f805" containerName="registry-server"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.150227 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.151849 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.157804 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.163758 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"]
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.289823 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kqpr\" (UniqueName: \"kubernetes.io/projected/bad35f44-6393-477a-bd8d-89eee9d7b405-kube-api-access-4kqpr\") pod \"collect-profiles-29458845-8mwmv\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.289903 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bad35f44-6393-477a-bd8d-89eee9d7b405-secret-volume\") pod \"collect-profiles-29458845-8mwmv\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.289935 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bad35f44-6393-477a-bd8d-89eee9d7b405-config-volume\") pod \"collect-profiles-29458845-8mwmv\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.391711 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kqpr\" (UniqueName: \"kubernetes.io/projected/bad35f44-6393-477a-bd8d-89eee9d7b405-kube-api-access-4kqpr\") pod \"collect-profiles-29458845-8mwmv\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.391777 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bad35f44-6393-477a-bd8d-89eee9d7b405-secret-volume\") pod \"collect-profiles-29458845-8mwmv\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.391797 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bad35f44-6393-477a-bd8d-89eee9d7b405-config-volume\") pod \"collect-profiles-29458845-8mwmv\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.392767 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bad35f44-6393-477a-bd8d-89eee9d7b405-config-volume\") pod \"collect-profiles-29458845-8mwmv\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.398499 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bad35f44-6393-477a-bd8d-89eee9d7b405-secret-volume\") pod \"collect-profiles-29458845-8mwmv\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.410531 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kqpr\" (UniqueName: \"kubernetes.io/projected/bad35f44-6393-477a-bd8d-89eee9d7b405-kube-api-access-4kqpr\") pod \"collect-profiles-29458845-8mwmv\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.469314 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:00 crc kubenswrapper[5003]: I0104 12:45:00.882538 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"]
Jan 04 12:45:01 crc kubenswrapper[5003]: I0104 12:45:01.236519 5003 generic.go:334] "Generic (PLEG): container finished" podID="bad35f44-6393-477a-bd8d-89eee9d7b405" containerID="edf633f3a0fdcb51d4fba5994408f07c9258fe0a35404de1aa1a701716f125fe" exitCode=0
Jan 04 12:45:01 crc kubenswrapper[5003]: I0104 12:45:01.236576 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv" event={"ID":"bad35f44-6393-477a-bd8d-89eee9d7b405","Type":"ContainerDied","Data":"edf633f3a0fdcb51d4fba5994408f07c9258fe0a35404de1aa1a701716f125fe"}
Jan 04 12:45:01 crc kubenswrapper[5003]: I0104 12:45:01.236631 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv" event={"ID":"bad35f44-6393-477a-bd8d-89eee9d7b405","Type":"ContainerStarted","Data":"dce1f3ea9091e7eb255abfe338b54af266b1af18bcf6c9ca8f749a89fdc0a1f2"}
Jan 04 12:45:02 crc kubenswrapper[5003]: I0104 12:45:02.582173 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:02 crc kubenswrapper[5003]: I0104 12:45:02.744215 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bad35f44-6393-477a-bd8d-89eee9d7b405-config-volume\") pod \"bad35f44-6393-477a-bd8d-89eee9d7b405\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") "
Jan 04 12:45:02 crc kubenswrapper[5003]: I0104 12:45:02.744280 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kqpr\" (UniqueName: \"kubernetes.io/projected/bad35f44-6393-477a-bd8d-89eee9d7b405-kube-api-access-4kqpr\") pod \"bad35f44-6393-477a-bd8d-89eee9d7b405\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") "
Jan 04 12:45:02 crc kubenswrapper[5003]: I0104 12:45:02.744424 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bad35f44-6393-477a-bd8d-89eee9d7b405-secret-volume\") pod \"bad35f44-6393-477a-bd8d-89eee9d7b405\" (UID: \"bad35f44-6393-477a-bd8d-89eee9d7b405\") "
Jan 04 12:45:02 crc kubenswrapper[5003]: I0104 12:45:02.744980 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bad35f44-6393-477a-bd8d-89eee9d7b405-config-volume" (OuterVolumeSpecName: "config-volume") pod "bad35f44-6393-477a-bd8d-89eee9d7b405" (UID: "bad35f44-6393-477a-bd8d-89eee9d7b405"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 12:45:02 crc kubenswrapper[5003]: I0104 12:45:02.749751 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bad35f44-6393-477a-bd8d-89eee9d7b405-kube-api-access-4kqpr" (OuterVolumeSpecName: "kube-api-access-4kqpr") pod "bad35f44-6393-477a-bd8d-89eee9d7b405" (UID: "bad35f44-6393-477a-bd8d-89eee9d7b405"). InnerVolumeSpecName "kube-api-access-4kqpr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:45:02 crc kubenswrapper[5003]: I0104 12:45:02.758205 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bad35f44-6393-477a-bd8d-89eee9d7b405-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "bad35f44-6393-477a-bd8d-89eee9d7b405" (UID: "bad35f44-6393-477a-bd8d-89eee9d7b405"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 12:45:02 crc kubenswrapper[5003]: I0104 12:45:02.846100 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bad35f44-6393-477a-bd8d-89eee9d7b405-config-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:45:02 crc kubenswrapper[5003]: I0104 12:45:02.846129 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kqpr\" (UniqueName: \"kubernetes.io/projected/bad35f44-6393-477a-bd8d-89eee9d7b405-kube-api-access-4kqpr\") on node \"crc\" DevicePath \"\""
Jan 04 12:45:02 crc kubenswrapper[5003]: I0104 12:45:02.846142 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bad35f44-6393-477a-bd8d-89eee9d7b405-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 04 12:45:03 crc kubenswrapper[5003]: I0104 12:45:03.254499 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv" event={"ID":"bad35f44-6393-477a-bd8d-89eee9d7b405","Type":"ContainerDied","Data":"dce1f3ea9091e7eb255abfe338b54af266b1af18bcf6c9ca8f749a89fdc0a1f2"}
Jan 04 12:45:03 crc kubenswrapper[5003]: I0104 12:45:03.254567 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dce1f3ea9091e7eb255abfe338b54af266b1af18bcf6c9ca8f749a89fdc0a1f2"
Jan 04 12:45:03 crc kubenswrapper[5003]: I0104 12:45:03.254562 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"
Jan 04 12:45:03 crc kubenswrapper[5003]: I0104 12:45:03.676362 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"]
Jan 04 12:45:03 crc kubenswrapper[5003]: I0104 12:45:03.681902 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458800-bs74d"]
Jan 04 12:45:04 crc kubenswrapper[5003]: I0104 12:45:04.822979 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eeaa07e-7d61-40e2-a855-de1d1e337838" path="/var/lib/kubelet/pods/4eeaa07e-7d61-40e2-a855-de1d1e337838/volumes"
pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:45:09 crc kubenswrapper[5003]: I0104 12:45:09.421105 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://75b2468fe392af0f5ba5e903625df0d783eabdb12a668e937157d68bb525adb8" gracePeriod=600 Jan 04 12:45:10 crc kubenswrapper[5003]: I0104 12:45:10.314839 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="75b2468fe392af0f5ba5e903625df0d783eabdb12a668e937157d68bb525adb8" exitCode=0 Jan 04 12:45:10 crc kubenswrapper[5003]: I0104 12:45:10.314909 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"75b2468fe392af0f5ba5e903625df0d783eabdb12a668e937157d68bb525adb8"} Jan 04 12:45:10 crc kubenswrapper[5003]: I0104 12:45:10.315369 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"} Jan 04 12:45:10 crc kubenswrapper[5003]: I0104 12:45:10.315398 5003 scope.go:117] "RemoveContainer" containerID="4ddcf79a0d2f27b546d3e549b2efd4403aeb7886b75ace8bd8da38c929e841bc" Jan 04 12:45:13 crc kubenswrapper[5003]: I0104 12:45:13.438452 5003 scope.go:117] "RemoveContainer" containerID="1540e3f3ff17ed81708c703ab7eafa75d81aaf990f596cb4dc9d43026651f818" Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.089837 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6trpn"] Jan 04 12:45:19 crc kubenswrapper[5003]: E0104 12:45:19.091647 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bad35f44-6393-477a-bd8d-89eee9d7b405" containerName="collect-profiles" Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.091719 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="bad35f44-6393-477a-bd8d-89eee9d7b405" containerName="collect-profiles" Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.091918 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="bad35f44-6393-477a-bd8d-89eee9d7b405" containerName="collect-profiles" Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.092937 5003 util.go:30] "No sandbox for pod can be found. 
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.089837 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6trpn"]
Jan 04 12:45:19 crc kubenswrapper[5003]: E0104 12:45:19.091647 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bad35f44-6393-477a-bd8d-89eee9d7b405" containerName="collect-profiles"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.091719 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="bad35f44-6393-477a-bd8d-89eee9d7b405" containerName="collect-profiles"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.091918 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="bad35f44-6393-477a-bd8d-89eee9d7b405" containerName="collect-profiles"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.092937 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.105958 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6trpn"]
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.206759 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nppz\" (UniqueName: \"kubernetes.io/projected/328cf03f-6d61-499e-bac4-b9a9978695c2-kube-api-access-6nppz\") pod \"community-operators-6trpn\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.207094 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-catalog-content\") pod \"community-operators-6trpn\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.207197 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-utilities\") pod \"community-operators-6trpn\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.308335 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-catalog-content\") pod \"community-operators-6trpn\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.308649 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-utilities\") pod \"community-operators-6trpn\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.308816 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nppz\" (UniqueName: \"kubernetes.io/projected/328cf03f-6d61-499e-bac4-b9a9978695c2-kube-api-access-6nppz\") pod \"community-operators-6trpn\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.309230 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-catalog-content\") pod \"community-operators-6trpn\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.309254 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-utilities\") pod \"community-operators-6trpn\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.332809 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nppz\" (UniqueName: \"kubernetes.io/projected/328cf03f-6d61-499e-bac4-b9a9978695c2-kube-api-access-6nppz\") pod \"community-operators-6trpn\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.412606 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:19 crc kubenswrapper[5003]: I0104 12:45:19.707965 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6trpn"]
Jan 04 12:45:20 crc kubenswrapper[5003]: I0104 12:45:20.411471 5003 generic.go:334] "Generic (PLEG): container finished" podID="328cf03f-6d61-499e-bac4-b9a9978695c2" containerID="b8e94aac611b60b7bce843b1ed87c0819e5e57563b5bf7ef66d83dff46ff5280" exitCode=0
Jan 04 12:45:20 crc kubenswrapper[5003]: I0104 12:45:20.411521 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trpn" event={"ID":"328cf03f-6d61-499e-bac4-b9a9978695c2","Type":"ContainerDied","Data":"b8e94aac611b60b7bce843b1ed87c0819e5e57563b5bf7ef66d83dff46ff5280"}
Jan 04 12:45:20 crc kubenswrapper[5003]: I0104 12:45:20.411550 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trpn" event={"ID":"328cf03f-6d61-499e-bac4-b9a9978695c2","Type":"ContainerStarted","Data":"6689bb72bdcd864ed415df26842a8ced53bb337e0f20bf6101473fa0198bea85"}
Jan 04 12:45:21 crc kubenswrapper[5003]: I0104 12:45:21.422624 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trpn" event={"ID":"328cf03f-6d61-499e-bac4-b9a9978695c2","Type":"ContainerStarted","Data":"15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438"}
Jan 04 12:45:22 crc kubenswrapper[5003]: I0104 12:45:22.440458 5003 generic.go:334] "Generic (PLEG): container finished" podID="328cf03f-6d61-499e-bac4-b9a9978695c2" containerID="15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438" exitCode=0
Jan 04 12:45:22 crc kubenswrapper[5003]: I0104 12:45:22.440526 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trpn" event={"ID":"328cf03f-6d61-499e-bac4-b9a9978695c2","Type":"ContainerDied","Data":"15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438"}
Jan 04 12:45:23 crc kubenswrapper[5003]: I0104 12:45:23.450463 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trpn" event={"ID":"328cf03f-6d61-499e-bac4-b9a9978695c2","Type":"ContainerStarted","Data":"a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da"}
Jan 04 12:45:23 crc kubenswrapper[5003]: I0104 12:45:23.485288 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6trpn" podStartSLOduration=2.043724632 podStartE2EDuration="4.485264512s" podCreationTimestamp="2026-01-04 12:45:19 +0000 UTC" firstStartedPulling="2026-01-04 12:45:20.413444739 +0000 UTC m=+3435.886474570" lastFinishedPulling="2026-01-04 12:45:22.854984599 +0000 UTC m=+3438.328014450" observedRunningTime="2026-01-04 12:45:23.480349069 +0000 UTC m=+3438.953378920" watchObservedRunningTime="2026-01-04 12:45:23.485264512 +0000 UTC m=+3438.958294363"
status="" pod="openshift-marketplace/community-operators-6trpn" Jan 04 12:45:29 crc kubenswrapper[5003]: I0104 12:45:29.414961 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6trpn" Jan 04 12:45:29 crc kubenswrapper[5003]: I0104 12:45:29.467642 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6trpn" Jan 04 12:45:29 crc kubenswrapper[5003]: I0104 12:45:29.540771 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6trpn" Jan 04 12:45:29 crc kubenswrapper[5003]: I0104 12:45:29.702174 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6trpn"] Jan 04 12:45:31 crc kubenswrapper[5003]: I0104 12:45:31.521593 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6trpn" podUID="328cf03f-6d61-499e-bac4-b9a9978695c2" containerName="registry-server" containerID="cri-o://a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da" gracePeriod=2 Jan 04 12:45:31 crc kubenswrapper[5003]: I0104 12:45:31.981541 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6trpn" Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.106158 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-utilities\") pod \"328cf03f-6d61-499e-bac4-b9a9978695c2\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.106697 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-catalog-content\") pod \"328cf03f-6d61-499e-bac4-b9a9978695c2\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.106854 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nppz\" (UniqueName: \"kubernetes.io/projected/328cf03f-6d61-499e-bac4-b9a9978695c2-kube-api-access-6nppz\") pod \"328cf03f-6d61-499e-bac4-b9a9978695c2\" (UID: \"328cf03f-6d61-499e-bac4-b9a9978695c2\") " Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.107149 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-utilities" (OuterVolumeSpecName: "utilities") pod "328cf03f-6d61-499e-bac4-b9a9978695c2" (UID: "328cf03f-6d61-499e-bac4-b9a9978695c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.113256 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/328cf03f-6d61-499e-bac4-b9a9978695c2-kube-api-access-6nppz" (OuterVolumeSpecName: "kube-api-access-6nppz") pod "328cf03f-6d61-499e-bac4-b9a9978695c2" (UID: "328cf03f-6d61-499e-bac4-b9a9978695c2"). InnerVolumeSpecName "kube-api-access-6nppz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.166494 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "328cf03f-6d61-499e-bac4-b9a9978695c2" (UID: "328cf03f-6d61-499e-bac4-b9a9978695c2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.208600 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nppz\" (UniqueName: \"kubernetes.io/projected/328cf03f-6d61-499e-bac4-b9a9978695c2-kube-api-access-6nppz\") on node \"crc\" DevicePath \"\"" Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.208840 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.208900 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/328cf03f-6d61-499e-bac4-b9a9978695c2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.534664 5003 generic.go:334] "Generic (PLEG): container finished" podID="328cf03f-6d61-499e-bac4-b9a9978695c2" containerID="a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da" exitCode=0 Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.534749 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trpn" event={"ID":"328cf03f-6d61-499e-bac4-b9a9978695c2","Type":"ContainerDied","Data":"a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da"} Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.534815 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6trpn" event={"ID":"328cf03f-6d61-499e-bac4-b9a9978695c2","Type":"ContainerDied","Data":"6689bb72bdcd864ed415df26842a8ced53bb337e0f20bf6101473fa0198bea85"} Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.534855 5003 scope.go:117] "RemoveContainer" containerID="a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da" Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.536309 5003 util.go:48] "No ready sandbox for pod can be found. 
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.536309 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6trpn"
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.562220 5003 scope.go:117] "RemoveContainer" containerID="15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438"
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.605881 5003 scope.go:117] "RemoveContainer" containerID="b8e94aac611b60b7bce843b1ed87c0819e5e57563b5bf7ef66d83dff46ff5280"
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.606757 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6trpn"]
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.613496 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6trpn"]
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.619877 5003 scope.go:117] "RemoveContainer" containerID="a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da"
Jan 04 12:45:32 crc kubenswrapper[5003]: E0104 12:45:32.620398 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da\": container with ID starting with a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da not found: ID does not exist" containerID="a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da"
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.620463 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da"} err="failed to get container status \"a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da\": rpc error: code = NotFound desc = could not find container \"a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da\": container with ID starting with a63bb8139721ac898514fe204a9e604685397607fbf603baae89a14f84c660da not found: ID does not exist"
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.620506 5003 scope.go:117] "RemoveContainer" containerID="15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438"
Jan 04 12:45:32 crc kubenswrapper[5003]: E0104 12:45:32.620851 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438\": container with ID starting with 15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438 not found: ID does not exist" containerID="15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438"
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.620890 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438"} err="failed to get container status \"15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438\": rpc error: code = NotFound desc = could not find container \"15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438\": container with ID starting with 15f7c341f41d2bb7a0afb4994636d3b0cdd984a5fead1555c22939f40b93b438 not found: ID does not exist"
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.620918 5003 scope.go:117] "RemoveContainer" containerID="b8e94aac611b60b7bce843b1ed87c0819e5e57563b5bf7ef66d83dff46ff5280"
Jan 04 12:45:32 crc kubenswrapper[5003]: E0104 12:45:32.621211 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8e94aac611b60b7bce843b1ed87c0819e5e57563b5bf7ef66d83dff46ff5280\": container with ID starting with b8e94aac611b60b7bce843b1ed87c0819e5e57563b5bf7ef66d83dff46ff5280 not found: ID does not exist" containerID="b8e94aac611b60b7bce843b1ed87c0819e5e57563b5bf7ef66d83dff46ff5280"
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.621254 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8e94aac611b60b7bce843b1ed87c0819e5e57563b5bf7ef66d83dff46ff5280"} err="failed to get container status \"b8e94aac611b60b7bce843b1ed87c0819e5e57563b5bf7ef66d83dff46ff5280\": rpc error: code = NotFound desc = could not find container \"b8e94aac611b60b7bce843b1ed87c0819e5e57563b5bf7ef66d83dff46ff5280\": container with ID starting with b8e94aac611b60b7bce843b1ed87c0819e5e57563b5bf7ef66d83dff46ff5280 not found: ID does not exist"
Jan 04 12:45:32 crc kubenswrapper[5003]: E0104 12:45:32.636264 5003 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod328cf03f_6d61_499e_bac4_b9a9978695c2.slice\": RecentStats: unable to find data in memory cache]"
Jan 04 12:45:32 crc kubenswrapper[5003]: I0104 12:45:32.816322 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="328cf03f-6d61-499e-bac4-b9a9978695c2" path="/var/lib/kubelet/pods/328cf03f-6d61-499e-bac4-b9a9978695c2/volumes"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.204548 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j7tmk"]
Jan 04 12:45:46 crc kubenswrapper[5003]: E0104 12:45:46.205726 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="328cf03f-6d61-499e-bac4-b9a9978695c2" containerName="extract-utilities"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.205751 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="328cf03f-6d61-499e-bac4-b9a9978695c2" containerName="extract-utilities"
Jan 04 12:45:46 crc kubenswrapper[5003]: E0104 12:45:46.205779 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="328cf03f-6d61-499e-bac4-b9a9978695c2" containerName="extract-content"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.205789 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="328cf03f-6d61-499e-bac4-b9a9978695c2" containerName="extract-content"
Jan 04 12:45:46 crc kubenswrapper[5003]: E0104 12:45:46.205810 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="328cf03f-6d61-499e-bac4-b9a9978695c2" containerName="registry-server"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.205821 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="328cf03f-6d61-499e-bac4-b9a9978695c2" containerName="registry-server"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.206079 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="328cf03f-6d61-499e-bac4-b9a9978695c2" containerName="registry-server"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.207676 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.223594 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j7tmk"]
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.348828 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-catalog-content\") pod \"redhat-marketplace-j7tmk\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.348887 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m857j\" (UniqueName: \"kubernetes.io/projected/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-kube-api-access-m857j\") pod \"redhat-marketplace-j7tmk\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.348914 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-utilities\") pod \"redhat-marketplace-j7tmk\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.451912 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-catalog-content\") pod \"redhat-marketplace-j7tmk\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.452191 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m857j\" (UniqueName: \"kubernetes.io/projected/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-kube-api-access-m857j\") pod \"redhat-marketplace-j7tmk\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.452570 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-catalog-content\") pod \"redhat-marketplace-j7tmk\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.452863 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-utilities\") pod \"redhat-marketplace-j7tmk\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.453601 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-utilities\") pod \"redhat-marketplace-j7tmk\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.478297 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m857j\" (UniqueName: \"kubernetes.io/projected/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-kube-api-access-m857j\") pod \"redhat-marketplace-j7tmk\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:45:46 crc kubenswrapper[5003]: I0104 12:45:46.534730 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:45:47 crc kubenswrapper[5003]: I0104 12:45:47.012764 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j7tmk"]
Jan 04 12:45:47 crc kubenswrapper[5003]: I0104 12:45:47.986493 5003 generic.go:334] "Generic (PLEG): container finished" podID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" containerID="6c5803032a03c5882ca5484b8f5ed6e89b1a4e23a8e9ce5e0a4eb3a2b99fa0eb" exitCode=0
Jan 04 12:45:47 crc kubenswrapper[5003]: I0104 12:45:47.986624 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j7tmk" event={"ID":"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0","Type":"ContainerDied","Data":"6c5803032a03c5882ca5484b8f5ed6e89b1a4e23a8e9ce5e0a4eb3a2b99fa0eb"}
Jan 04 12:45:47 crc kubenswrapper[5003]: I0104 12:45:47.986769 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j7tmk" event={"ID":"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0","Type":"ContainerStarted","Data":"5f01ccc57fc9621a6c3b54a0deec1a441a2c27b9b832538adf82f865b8bce9c7"}
Jan 04 12:45:49 crc kubenswrapper[5003]: I0104 12:45:49.001385 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j7tmk" event={"ID":"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0","Type":"ContainerStarted","Data":"4f4e68f57972a419236f3f9e91e340435638bca5b85817d535d6db99478c1faa"}
Jan 04 12:45:50 crc kubenswrapper[5003]: I0104 12:45:50.011616 5003 generic.go:334] "Generic (PLEG): container finished" podID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" containerID="4f4e68f57972a419236f3f9e91e340435638bca5b85817d535d6db99478c1faa" exitCode=0
Jan 04 12:45:50 crc kubenswrapper[5003]: I0104 12:45:50.011693 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j7tmk" event={"ID":"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0","Type":"ContainerDied","Data":"4f4e68f57972a419236f3f9e91e340435638bca5b85817d535d6db99478c1faa"}
Jan 04 12:45:51 crc kubenswrapper[5003]: I0104 12:45:51.023712 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j7tmk" event={"ID":"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0","Type":"ContainerStarted","Data":"3c728f6bab3e1216c49ab5f0dfc3425adb82c5e47e7fd16231ce8f3490b8c04e"}
Jan 04 12:45:51 crc kubenswrapper[5003]: I0104 12:45:51.049811 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j7tmk" podStartSLOduration=2.472952448 podStartE2EDuration="5.049779475s" podCreationTimestamp="2026-01-04 12:45:46 +0000 UTC" firstStartedPulling="2026-01-04 12:45:47.988983111 +0000 UTC m=+3463.462012962" lastFinishedPulling="2026-01-04 12:45:50.565810108 +0000 UTC m=+3466.038839989" observedRunningTime="2026-01-04 12:45:51.047240991 +0000 UTC m=+3466.520270852" watchObservedRunningTime="2026-01-04 12:45:51.049779475 +0000 UTC m=+3466.522809356"
pod="openshift-marketplace/redhat-marketplace-j7tmk" Jan 04 12:45:56 crc kubenswrapper[5003]: I0104 12:45:56.536304 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j7tmk" Jan 04 12:45:56 crc kubenswrapper[5003]: I0104 12:45:56.598775 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j7tmk" Jan 04 12:45:57 crc kubenswrapper[5003]: I0104 12:45:57.122101 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j7tmk" Jan 04 12:45:57 crc kubenswrapper[5003]: I0104 12:45:57.174757 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j7tmk"] Jan 04 12:45:59 crc kubenswrapper[5003]: I0104 12:45:59.099466 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j7tmk" podUID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" containerName="registry-server" containerID="cri-o://3c728f6bab3e1216c49ab5f0dfc3425adb82c5e47e7fd16231ce8f3490b8c04e" gracePeriod=2 Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.109975 5003 generic.go:334] "Generic (PLEG): container finished" podID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" containerID="3c728f6bab3e1216c49ab5f0dfc3425adb82c5e47e7fd16231ce8f3490b8c04e" exitCode=0 Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.110058 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j7tmk" event={"ID":"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0","Type":"ContainerDied","Data":"3c728f6bab3e1216c49ab5f0dfc3425adb82c5e47e7fd16231ce8f3490b8c04e"} Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.110420 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j7tmk" event={"ID":"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0","Type":"ContainerDied","Data":"5f01ccc57fc9621a6c3b54a0deec1a441a2c27b9b832538adf82f865b8bce9c7"} Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.110441 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f01ccc57fc9621a6c3b54a0deec1a441a2c27b9b832538adf82f865b8bce9c7" Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.140211 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j7tmk" Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.279858 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-catalog-content\") pod \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.280200 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-utilities\") pod \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.280275 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m857j\" (UniqueName: \"kubernetes.io/projected/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-kube-api-access-m857j\") pod \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\" (UID: \"fad1a74b-ba90-41cc-9ff4-ad356c08b6c0\") " Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.281119 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-utilities" (OuterVolumeSpecName: "utilities") pod "fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" (UID: "fad1a74b-ba90-41cc-9ff4-ad356c08b6c0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.286880 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-kube-api-access-m857j" (OuterVolumeSpecName: "kube-api-access-m857j") pod "fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" (UID: "fad1a74b-ba90-41cc-9ff4-ad356c08b6c0"). InnerVolumeSpecName "kube-api-access-m857j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.303845 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" (UID: "fad1a74b-ba90-41cc-9ff4-ad356c08b6c0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.381872 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.381919 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m857j\" (UniqueName: \"kubernetes.io/projected/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-kube-api-access-m857j\") on node \"crc\" DevicePath \"\"" Jan 04 12:46:00 crc kubenswrapper[5003]: I0104 12:46:00.381930 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:46:01 crc kubenswrapper[5003]: I0104 12:46:01.122235 5003 util.go:48] "No ready sandbox for pod can be found. 
Jan 04 12:46:01 crc kubenswrapper[5003]: I0104 12:46:01.122235 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j7tmk"
Jan 04 12:46:01 crc kubenswrapper[5003]: I0104 12:46:01.165661 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j7tmk"]
Jan 04 12:46:01 crc kubenswrapper[5003]: I0104 12:46:01.173662 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j7tmk"]
Jan 04 12:46:02 crc kubenswrapper[5003]: I0104 12:46:02.822830 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" path="/var/lib/kubelet/pods/fad1a74b-ba90-41cc-9ff4-ad356c08b6c0/volumes"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.610078 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ck6w2"]
Jan 04 12:46:37 crc kubenswrapper[5003]: E0104 12:46:37.611007 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" containerName="extract-utilities"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.611041 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" containerName="extract-utilities"
Jan 04 12:46:37 crc kubenswrapper[5003]: E0104 12:46:37.611055 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" containerName="extract-content"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.611063 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" containerName="extract-content"
Jan 04 12:46:37 crc kubenswrapper[5003]: E0104 12:46:37.611077 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" containerName="registry-server"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.611084 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" containerName="registry-server"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.611262 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="fad1a74b-ba90-41cc-9ff4-ad356c08b6c0" containerName="registry-server"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.614056 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ck6w2"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.634337 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ck6w2"]
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.679007 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-catalog-content\") pod \"redhat-operators-ck6w2\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") " pod="openshift-marketplace/redhat-operators-ck6w2"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.679114 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7l8j\" (UniqueName: \"kubernetes.io/projected/d1daf2b4-41b6-4491-8285-4d7f136f1705-kube-api-access-l7l8j\") pod \"redhat-operators-ck6w2\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") " pod="openshift-marketplace/redhat-operators-ck6w2"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.679220 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-utilities\") pod \"redhat-operators-ck6w2\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") " pod="openshift-marketplace/redhat-operators-ck6w2"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.780528 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-catalog-content\") pod \"redhat-operators-ck6w2\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") " pod="openshift-marketplace/redhat-operators-ck6w2"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.780603 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7l8j\" (UniqueName: \"kubernetes.io/projected/d1daf2b4-41b6-4491-8285-4d7f136f1705-kube-api-access-l7l8j\") pod \"redhat-operators-ck6w2\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") " pod="openshift-marketplace/redhat-operators-ck6w2"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.780689 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-utilities\") pod \"redhat-operators-ck6w2\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") " pod="openshift-marketplace/redhat-operators-ck6w2"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.781196 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-catalog-content\") pod \"redhat-operators-ck6w2\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") " pod="openshift-marketplace/redhat-operators-ck6w2"
Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.781404 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-utilities\") pod \"redhat-operators-ck6w2\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") " pod="openshift-marketplace/redhat-operators-ck6w2"
\"kube-api-access-l7l8j\" (UniqueName: \"kubernetes.io/projected/d1daf2b4-41b6-4491-8285-4d7f136f1705-kube-api-access-l7l8j\") pod \"redhat-operators-ck6w2\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") " pod="openshift-marketplace/redhat-operators-ck6w2" Jan 04 12:46:37 crc kubenswrapper[5003]: I0104 12:46:37.943305 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ck6w2" Jan 04 12:46:38 crc kubenswrapper[5003]: I0104 12:46:38.389362 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ck6w2"] Jan 04 12:46:38 crc kubenswrapper[5003]: I0104 12:46:38.435628 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck6w2" event={"ID":"d1daf2b4-41b6-4491-8285-4d7f136f1705","Type":"ContainerStarted","Data":"800e048003136d5062e93d75a4180aa575f5bf5a84449dddd1f190df1f10e842"} Jan 04 12:46:39 crc kubenswrapper[5003]: I0104 12:46:39.447210 5003 generic.go:334] "Generic (PLEG): container finished" podID="d1daf2b4-41b6-4491-8285-4d7f136f1705" containerID="0fda8ada55fcdc3c5baf7ce8c0cf49ac19ef79cc02d1d119d5732ce882115360" exitCode=0 Jan 04 12:46:39 crc kubenswrapper[5003]: I0104 12:46:39.447306 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck6w2" event={"ID":"d1daf2b4-41b6-4491-8285-4d7f136f1705","Type":"ContainerDied","Data":"0fda8ada55fcdc3c5baf7ce8c0cf49ac19ef79cc02d1d119d5732ce882115360"} Jan 04 12:46:40 crc kubenswrapper[5003]: I0104 12:46:40.456866 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck6w2" event={"ID":"d1daf2b4-41b6-4491-8285-4d7f136f1705","Type":"ContainerStarted","Data":"a1ba0a87c7cf799480abaf02755c422394c825c31e3abc85e1afe6508452f928"} Jan 04 12:46:41 crc kubenswrapper[5003]: I0104 12:46:41.465708 5003 generic.go:334] "Generic (PLEG): container finished" podID="d1daf2b4-41b6-4491-8285-4d7f136f1705" containerID="a1ba0a87c7cf799480abaf02755c422394c825c31e3abc85e1afe6508452f928" exitCode=0 Jan 04 12:46:41 crc kubenswrapper[5003]: I0104 12:46:41.465979 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck6w2" event={"ID":"d1daf2b4-41b6-4491-8285-4d7f136f1705","Type":"ContainerDied","Data":"a1ba0a87c7cf799480abaf02755c422394c825c31e3abc85e1afe6508452f928"} Jan 04 12:46:42 crc kubenswrapper[5003]: I0104 12:46:42.476684 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck6w2" event={"ID":"d1daf2b4-41b6-4491-8285-4d7f136f1705","Type":"ContainerStarted","Data":"fa959880c63a268c791f24906e1fe9e137d55e3b753425f9cf0398201b726783"} Jan 04 12:46:42 crc kubenswrapper[5003]: I0104 12:46:42.504514 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ck6w2" podStartSLOduration=2.924144336 podStartE2EDuration="5.504487014s" podCreationTimestamp="2026-01-04 12:46:37 +0000 UTC" firstStartedPulling="2026-01-04 12:46:39.44948439 +0000 UTC m=+3514.922514271" lastFinishedPulling="2026-01-04 12:46:42.029827118 +0000 UTC m=+3517.502856949" observedRunningTime="2026-01-04 12:46:42.503360332 +0000 UTC m=+3517.976390243" watchObservedRunningTime="2026-01-04 12:46:42.504487014 +0000 UTC m=+3517.977516875" Jan 04 12:46:47 crc kubenswrapper[5003]: I0104 12:46:47.944066 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ck6w2" Jan 04 
Jan 04 12:46:47 crc kubenswrapper[5003]: I0104 12:46:47.997657 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ck6w2"
Jan 04 12:46:48 crc kubenswrapper[5003]: I0104 12:46:48.565092 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ck6w2"
Jan 04 12:46:48 crc kubenswrapper[5003]: I0104 12:46:48.612937 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ck6w2"]
Jan 04 12:46:50 crc kubenswrapper[5003]: I0104 12:46:50.536379 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ck6w2" podUID="d1daf2b4-41b6-4491-8285-4d7f136f1705" containerName="registry-server" containerID="cri-o://fa959880c63a268c791f24906e1fe9e137d55e3b753425f9cf0398201b726783" gracePeriod=2
Jan 04 12:46:52 crc kubenswrapper[5003]: I0104 12:46:52.554072 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck6w2" event={"ID":"d1daf2b4-41b6-4491-8285-4d7f136f1705","Type":"ContainerDied","Data":"fa959880c63a268c791f24906e1fe9e137d55e3b753425f9cf0398201b726783"}
Jan 04 12:46:52 crc kubenswrapper[5003]: I0104 12:46:52.554086 5003 generic.go:334] "Generic (PLEG): container finished" podID="d1daf2b4-41b6-4491-8285-4d7f136f1705" containerID="fa959880c63a268c791f24906e1fe9e137d55e3b753425f9cf0398201b726783" exitCode=0
Jan 04 12:46:52 crc kubenswrapper[5003]: I0104 12:46:52.934301 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ck6w2"
Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.012750 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-catalog-content\") pod \"d1daf2b4-41b6-4491-8285-4d7f136f1705\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") "
Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.012833 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-utilities\") pod \"d1daf2b4-41b6-4491-8285-4d7f136f1705\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") "
Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.012943 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7l8j\" (UniqueName: \"kubernetes.io/projected/d1daf2b4-41b6-4491-8285-4d7f136f1705-kube-api-access-l7l8j\") pod \"d1daf2b4-41b6-4491-8285-4d7f136f1705\" (UID: \"d1daf2b4-41b6-4491-8285-4d7f136f1705\") "
Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.014093 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-utilities" (OuterVolumeSpecName: "utilities") pod "d1daf2b4-41b6-4491-8285-4d7f136f1705" (UID: "d1daf2b4-41b6-4491-8285-4d7f136f1705"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.019762 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1daf2b4-41b6-4491-8285-4d7f136f1705-kube-api-access-l7l8j" (OuterVolumeSpecName: "kube-api-access-l7l8j") pod "d1daf2b4-41b6-4491-8285-4d7f136f1705" (UID: "d1daf2b4-41b6-4491-8285-4d7f136f1705"). InnerVolumeSpecName "kube-api-access-l7l8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.114278 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7l8j\" (UniqueName: \"kubernetes.io/projected/d1daf2b4-41b6-4491-8285-4d7f136f1705-kube-api-access-l7l8j\") on node \"crc\" DevicePath \"\""
Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.114316 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.161239 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d1daf2b4-41b6-4491-8285-4d7f136f1705" (UID: "d1daf2b4-41b6-4491-8285-4d7f136f1705"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.215426 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d1daf2b4-41b6-4491-8285-4d7f136f1705-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.567652 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ck6w2" event={"ID":"d1daf2b4-41b6-4491-8285-4d7f136f1705","Type":"ContainerDied","Data":"800e048003136d5062e93d75a4180aa575f5bf5a84449dddd1f190df1f10e842"}
Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.567757 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ck6w2"
Need to start a new one" pod="openshift-marketplace/redhat-operators-ck6w2" Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.567762 5003 scope.go:117] "RemoveContainer" containerID="fa959880c63a268c791f24906e1fe9e137d55e3b753425f9cf0398201b726783" Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.599225 5003 scope.go:117] "RemoveContainer" containerID="a1ba0a87c7cf799480abaf02755c422394c825c31e3abc85e1afe6508452f928" Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.621974 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ck6w2"] Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.631871 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ck6w2"] Jan 04 12:46:53 crc kubenswrapper[5003]: I0104 12:46:53.658637 5003 scope.go:117] "RemoveContainer" containerID="0fda8ada55fcdc3c5baf7ce8c0cf49ac19ef79cc02d1d119d5732ce882115360" Jan 04 12:46:54 crc kubenswrapper[5003]: I0104 12:46:54.823195 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1daf2b4-41b6-4491-8285-4d7f136f1705" path="/var/lib/kubelet/pods/d1daf2b4-41b6-4491-8285-4d7f136f1705/volumes" Jan 04 12:47:09 crc kubenswrapper[5003]: I0104 12:47:09.419234 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:47:09 crc kubenswrapper[5003]: I0104 12:47:09.419816 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:47:39 crc kubenswrapper[5003]: I0104 12:47:39.418956 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:47:39 crc kubenswrapper[5003]: I0104 12:47:39.419587 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:48:09 crc kubenswrapper[5003]: I0104 12:48:09.418650 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:48:09 crc kubenswrapper[5003]: I0104 12:48:09.419351 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:48:09 crc kubenswrapper[5003]: I0104 12:48:09.419421 5003 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 12:48:09 crc kubenswrapper[5003]: I0104 12:48:09.420275 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:48:09 crc kubenswrapper[5003]: I0104 12:48:09.420357 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" gracePeriod=600 Jan 04 12:48:09 crc kubenswrapper[5003]: E0104 12:48:09.559456 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:48:10 crc kubenswrapper[5003]: I0104 12:48:10.242998 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" exitCode=0 Jan 04 12:48:10 crc kubenswrapper[5003]: I0104 12:48:10.243070 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"} Jan 04 12:48:10 crc kubenswrapper[5003]: I0104 12:48:10.243124 5003 scope.go:117] "RemoveContainer" containerID="75b2468fe392af0f5ba5e903625df0d783eabdb12a668e937157d68bb525adb8" Jan 04 12:48:10 crc kubenswrapper[5003]: I0104 12:48:10.244312 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:48:10 crc kubenswrapper[5003]: E0104 12:48:10.244573 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:48:22 crc kubenswrapper[5003]: I0104 12:48:22.807792 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:48:22 crc kubenswrapper[5003]: E0104 12:48:22.809995 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:48:33 
Jan 04 12:48:33 crc kubenswrapper[5003]: E0104 12:48:33.810786 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:48:46 crc kubenswrapper[5003]: I0104 12:48:46.806281 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"
Jan 04 12:48:46 crc kubenswrapper[5003]: E0104 12:48:46.807321 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:48:59 crc kubenswrapper[5003]: I0104 12:48:59.807146 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"
Jan 04 12:48:59 crc kubenswrapper[5003]: E0104 12:48:59.808192 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:49:13 crc kubenswrapper[5003]: I0104 12:49:13.806726 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"
Jan 04 12:49:13 crc kubenswrapper[5003]: E0104 12:49:13.807445 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:49:27 crc kubenswrapper[5003]: I0104 12:49:27.807860 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"
Jan 04 12:49:27 crc kubenswrapper[5003]: E0104 12:49:27.809248 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:49:39 crc kubenswrapper[5003]: I0104 12:49:39.806220 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"
Jan 04 12:49:39 crc kubenswrapper[5003]: E0104 12:49:39.806914 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:49:54 crc kubenswrapper[5003]: I0104 12:49:54.811308 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"
Jan 04 12:49:54 crc kubenswrapper[5003]: E0104 12:49:54.812495 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:50:07 crc kubenswrapper[5003]: I0104 12:50:07.808272 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"
Jan 04 12:50:07 crc kubenswrapper[5003]: E0104 12:50:07.810118 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:50:18 crc kubenswrapper[5003]: I0104 12:50:18.807671 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"
Jan 04 12:50:18 crc kubenswrapper[5003]: E0104 12:50:18.808896 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:50:33 crc kubenswrapper[5003]: I0104 12:50:33.807249 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"
Jan 04 12:50:33 crc kubenswrapper[5003]: E0104 12:50:33.808894 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:50:47 crc kubenswrapper[5003]: I0104 12:50:47.807726 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"
Jan 04 12:50:47 crc kubenswrapper[5003]: E0104 12:50:47.808777 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:51:00 crc kubenswrapper[5003]: I0104 12:51:00.807407 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:51:00 crc kubenswrapper[5003]: E0104 12:51:00.808492 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:51:14 crc kubenswrapper[5003]: I0104 12:51:14.811634 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:51:14 crc kubenswrapper[5003]: E0104 12:51:14.813906 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:51:26 crc kubenswrapper[5003]: I0104 12:51:26.807683 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:51:26 crc kubenswrapper[5003]: E0104 12:51:26.809390 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:51:38 crc kubenswrapper[5003]: I0104 12:51:38.807538 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:51:38 crc kubenswrapper[5003]: E0104 12:51:38.808669 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:51:49 crc kubenswrapper[5003]: I0104 12:51:49.807155 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:51:49 crc kubenswrapper[5003]: E0104 12:51:49.808112 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:52:00 crc kubenswrapper[5003]: I0104 12:52:00.807071 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:52:00 crc kubenswrapper[5003]: E0104 12:52:00.807812 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:52:13 crc kubenswrapper[5003]: I0104 12:52:13.627926 5003 scope.go:117] "RemoveContainer" containerID="3c728f6bab3e1216c49ab5f0dfc3425adb82c5e47e7fd16231ce8f3490b8c04e" Jan 04 12:52:13 crc kubenswrapper[5003]: I0104 12:52:13.654989 5003 scope.go:117] "RemoveContainer" containerID="6c5803032a03c5882ca5484b8f5ed6e89b1a4e23a8e9ce5e0a4eb3a2b99fa0eb" Jan 04 12:52:13 crc kubenswrapper[5003]: I0104 12:52:13.675506 5003 scope.go:117] "RemoveContainer" containerID="4f4e68f57972a419236f3f9e91e340435638bca5b85817d535d6db99478c1faa" Jan 04 12:52:15 crc kubenswrapper[5003]: I0104 12:52:15.807646 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:52:15 crc kubenswrapper[5003]: E0104 12:52:15.808453 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:52:28 crc kubenswrapper[5003]: I0104 12:52:28.807049 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:52:28 crc kubenswrapper[5003]: E0104 12:52:28.808036 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:52:39 crc kubenswrapper[5003]: I0104 12:52:39.806775 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:52:39 crc kubenswrapper[5003]: E0104 12:52:39.807681 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:52:54 crc kubenswrapper[5003]: I0104 12:52:54.815725 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:52:54 crc 
Jan 04 12:53:08 crc kubenswrapper[5003]: I0104 12:53:08.807453 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32"
Jan 04 12:53:08 crc kubenswrapper[5003]: E0104 12:53:08.808356 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.275312 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6f8kq"]
Jan 04 12:53:16 crc kubenswrapper[5003]: E0104 12:53:16.276255 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1daf2b4-41b6-4491-8285-4d7f136f1705" containerName="extract-content"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.276271 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1daf2b4-41b6-4491-8285-4d7f136f1705" containerName="extract-content"
Jan 04 12:53:16 crc kubenswrapper[5003]: E0104 12:53:16.276297 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1daf2b4-41b6-4491-8285-4d7f136f1705" containerName="registry-server"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.276305 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1daf2b4-41b6-4491-8285-4d7f136f1705" containerName="registry-server"
Jan 04 12:53:16 crc kubenswrapper[5003]: E0104 12:53:16.276316 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1daf2b4-41b6-4491-8285-4d7f136f1705" containerName="extract-utilities"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.276324 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1daf2b4-41b6-4491-8285-4d7f136f1705" containerName="extract-utilities"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.276507 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1daf2b4-41b6-4491-8285-4d7f136f1705" containerName="registry-server"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.277736 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6f8kq"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.295644 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6f8kq"]
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.376532 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-utilities\") pod \"certified-operators-6f8kq\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " pod="openshift-marketplace/certified-operators-6f8kq"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.376595 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s58xb\" (UniqueName: \"kubernetes.io/projected/20147cbe-0709-4360-89e9-4c337ffb3dc2-kube-api-access-s58xb\") pod \"certified-operators-6f8kq\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " pod="openshift-marketplace/certified-operators-6f8kq"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.376627 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-catalog-content\") pod \"certified-operators-6f8kq\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " pod="openshift-marketplace/certified-operators-6f8kq"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.477877 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-utilities\") pod \"certified-operators-6f8kq\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " pod="openshift-marketplace/certified-operators-6f8kq"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.478296 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s58xb\" (UniqueName: \"kubernetes.io/projected/20147cbe-0709-4360-89e9-4c337ffb3dc2-kube-api-access-s58xb\") pod \"certified-operators-6f8kq\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " pod="openshift-marketplace/certified-operators-6f8kq"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.478329 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-catalog-content\") pod \"certified-operators-6f8kq\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " pod="openshift-marketplace/certified-operators-6f8kq"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.478369 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-utilities\") pod \"certified-operators-6f8kq\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " pod="openshift-marketplace/certified-operators-6f8kq"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.478619 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-catalog-content\") pod \"certified-operators-6f8kq\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " pod="openshift-marketplace/certified-operators-6f8kq"
Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.498215 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s58xb\" (UniqueName: \"kubernetes.io/projected/20147cbe-0709-4360-89e9-4c337ffb3dc2-kube-api-access-s58xb\") pod \"certified-operators-6f8kq\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " pod="openshift-marketplace/certified-operators-6f8kq"
"MountVolume.SetUp succeeded for volume \"kube-api-access-s58xb\" (UniqueName: \"kubernetes.io/projected/20147cbe-0709-4360-89e9-4c337ffb3dc2-kube-api-access-s58xb\") pod \"certified-operators-6f8kq\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " pod="openshift-marketplace/certified-operators-6f8kq" Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.607549 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6f8kq" Jan 04 12:53:16 crc kubenswrapper[5003]: I0104 12:53:16.870407 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6f8kq"] Jan 04 12:53:17 crc kubenswrapper[5003]: I0104 12:53:17.169554 5003 generic.go:334] "Generic (PLEG): container finished" podID="20147cbe-0709-4360-89e9-4c337ffb3dc2" containerID="dc22075a6dac803807f12ae9aa40a1c6fd7e26e42573bd6d4387b7d8997ac451" exitCode=0 Jan 04 12:53:17 crc kubenswrapper[5003]: I0104 12:53:17.169604 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f8kq" event={"ID":"20147cbe-0709-4360-89e9-4c337ffb3dc2","Type":"ContainerDied","Data":"dc22075a6dac803807f12ae9aa40a1c6fd7e26e42573bd6d4387b7d8997ac451"} Jan 04 12:53:17 crc kubenswrapper[5003]: I0104 12:53:17.169658 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f8kq" event={"ID":"20147cbe-0709-4360-89e9-4c337ffb3dc2","Type":"ContainerStarted","Data":"a5fb8e6ed3ded7426d5a21eb2989a959630e6f8c856ab795077b246f0f78a1d6"} Jan 04 12:53:17 crc kubenswrapper[5003]: I0104 12:53:17.171378 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 12:53:18 crc kubenswrapper[5003]: I0104 12:53:18.179783 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f8kq" event={"ID":"20147cbe-0709-4360-89e9-4c337ffb3dc2","Type":"ContainerStarted","Data":"b46b032c952dec9ad4e3fd84982d90a6954bd4c11f9718a12bd08556b6295d04"} Jan 04 12:53:19 crc kubenswrapper[5003]: I0104 12:53:19.191595 5003 generic.go:334] "Generic (PLEG): container finished" podID="20147cbe-0709-4360-89e9-4c337ffb3dc2" containerID="b46b032c952dec9ad4e3fd84982d90a6954bd4c11f9718a12bd08556b6295d04" exitCode=0 Jan 04 12:53:19 crc kubenswrapper[5003]: I0104 12:53:19.191674 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f8kq" event={"ID":"20147cbe-0709-4360-89e9-4c337ffb3dc2","Type":"ContainerDied","Data":"b46b032c952dec9ad4e3fd84982d90a6954bd4c11f9718a12bd08556b6295d04"} Jan 04 12:53:20 crc kubenswrapper[5003]: I0104 12:53:20.208925 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f8kq" event={"ID":"20147cbe-0709-4360-89e9-4c337ffb3dc2","Type":"ContainerStarted","Data":"9cb036a2da6946adc960b78160692599803cbe895bf084c469c9e07c0d3ec635"} Jan 04 12:53:20 crc kubenswrapper[5003]: I0104 12:53:20.231954 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6f8kq" podStartSLOduration=1.8254793390000001 podStartE2EDuration="4.231931168s" podCreationTimestamp="2026-01-04 12:53:16 +0000 UTC" firstStartedPulling="2026-01-04 12:53:17.171156048 +0000 UTC m=+3912.644185889" lastFinishedPulling="2026-01-04 12:53:19.577607877 +0000 UTC m=+3915.050637718" observedRunningTime="2026-01-04 12:53:20.226873894 +0000 UTC m=+3915.699903745" watchObservedRunningTime="2026-01-04 
12:53:20.231931168 +0000 UTC m=+3915.704961009" Jan 04 12:53:20 crc kubenswrapper[5003]: I0104 12:53:20.808272 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:53:21 crc kubenswrapper[5003]: I0104 12:53:21.221368 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"ad47b5ec6c7f3f4fe940d5279c53af7f028a95680050dddd7aa4de6857330099"} Jan 04 12:53:26 crc kubenswrapper[5003]: I0104 12:53:26.608042 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6f8kq" Jan 04 12:53:26 crc kubenswrapper[5003]: I0104 12:53:26.608657 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6f8kq" Jan 04 12:53:26 crc kubenswrapper[5003]: I0104 12:53:26.678741 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6f8kq" Jan 04 12:53:27 crc kubenswrapper[5003]: I0104 12:53:27.341612 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6f8kq" Jan 04 12:53:27 crc kubenswrapper[5003]: I0104 12:53:27.401357 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6f8kq"] Jan 04 12:53:29 crc kubenswrapper[5003]: I0104 12:53:29.285961 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6f8kq" podUID="20147cbe-0709-4360-89e9-4c337ffb3dc2" containerName="registry-server" containerID="cri-o://9cb036a2da6946adc960b78160692599803cbe895bf084c469c9e07c0d3ec635" gracePeriod=2 Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.301531 5003 generic.go:334] "Generic (PLEG): container finished" podID="20147cbe-0709-4360-89e9-4c337ffb3dc2" containerID="9cb036a2da6946adc960b78160692599803cbe895bf084c469c9e07c0d3ec635" exitCode=0 Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.301646 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f8kq" event={"ID":"20147cbe-0709-4360-89e9-4c337ffb3dc2","Type":"ContainerDied","Data":"9cb036a2da6946adc960b78160692599803cbe895bf084c469c9e07c0d3ec635"} Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.447944 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6f8kq" Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.621624 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-utilities\") pod \"20147cbe-0709-4360-89e9-4c337ffb3dc2\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.621716 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s58xb\" (UniqueName: \"kubernetes.io/projected/20147cbe-0709-4360-89e9-4c337ffb3dc2-kube-api-access-s58xb\") pod \"20147cbe-0709-4360-89e9-4c337ffb3dc2\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.621754 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-catalog-content\") pod \"20147cbe-0709-4360-89e9-4c337ffb3dc2\" (UID: \"20147cbe-0709-4360-89e9-4c337ffb3dc2\") " Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.623201 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-utilities" (OuterVolumeSpecName: "utilities") pod "20147cbe-0709-4360-89e9-4c337ffb3dc2" (UID: "20147cbe-0709-4360-89e9-4c337ffb3dc2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.642837 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20147cbe-0709-4360-89e9-4c337ffb3dc2-kube-api-access-s58xb" (OuterVolumeSpecName: "kube-api-access-s58xb") pod "20147cbe-0709-4360-89e9-4c337ffb3dc2" (UID: "20147cbe-0709-4360-89e9-4c337ffb3dc2"). InnerVolumeSpecName "kube-api-access-s58xb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.691623 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "20147cbe-0709-4360-89e9-4c337ffb3dc2" (UID: "20147cbe-0709-4360-89e9-4c337ffb3dc2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.724180 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.724240 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s58xb\" (UniqueName: \"kubernetes.io/projected/20147cbe-0709-4360-89e9-4c337ffb3dc2-kube-api-access-s58xb\") on node \"crc\" DevicePath \"\"" Jan 04 12:53:30 crc kubenswrapper[5003]: I0104 12:53:30.724261 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20147cbe-0709-4360-89e9-4c337ffb3dc2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:53:31 crc kubenswrapper[5003]: I0104 12:53:31.317050 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6f8kq" event={"ID":"20147cbe-0709-4360-89e9-4c337ffb3dc2","Type":"ContainerDied","Data":"a5fb8e6ed3ded7426d5a21eb2989a959630e6f8c856ab795077b246f0f78a1d6"} Jan 04 12:53:31 crc kubenswrapper[5003]: I0104 12:53:31.317128 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6f8kq" Jan 04 12:53:31 crc kubenswrapper[5003]: I0104 12:53:31.317133 5003 scope.go:117] "RemoveContainer" containerID="9cb036a2da6946adc960b78160692599803cbe895bf084c469c9e07c0d3ec635" Jan 04 12:53:31 crc kubenswrapper[5003]: I0104 12:53:31.353920 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6f8kq"] Jan 04 12:53:31 crc kubenswrapper[5003]: I0104 12:53:31.358845 5003 scope.go:117] "RemoveContainer" containerID="b46b032c952dec9ad4e3fd84982d90a6954bd4c11f9718a12bd08556b6295d04" Jan 04 12:53:31 crc kubenswrapper[5003]: I0104 12:53:31.365868 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6f8kq"] Jan 04 12:53:31 crc kubenswrapper[5003]: I0104 12:53:31.380385 5003 scope.go:117] "RemoveContainer" containerID="dc22075a6dac803807f12ae9aa40a1c6fd7e26e42573bd6d4387b7d8997ac451" Jan 04 12:53:32 crc kubenswrapper[5003]: I0104 12:53:32.825882 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20147cbe-0709-4360-89e9-4c337ffb3dc2" path="/var/lib/kubelet/pods/20147cbe-0709-4360-89e9-4c337ffb3dc2/volumes" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.584969 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xqcbh"] Jan 04 12:55:22 crc kubenswrapper[5003]: E0104 12:55:22.585798 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20147cbe-0709-4360-89e9-4c337ffb3dc2" containerName="registry-server" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.585812 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="20147cbe-0709-4360-89e9-4c337ffb3dc2" containerName="registry-server" Jan 04 12:55:22 crc kubenswrapper[5003]: E0104 12:55:22.585837 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20147cbe-0709-4360-89e9-4c337ffb3dc2" containerName="extract-utilities" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.585843 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="20147cbe-0709-4360-89e9-4c337ffb3dc2" containerName="extract-utilities" Jan 04 12:55:22 crc kubenswrapper[5003]: E0104 12:55:22.585858 5003 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20147cbe-0709-4360-89e9-4c337ffb3dc2" containerName="extract-content" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.585864 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="20147cbe-0709-4360-89e9-4c337ffb3dc2" containerName="extract-content" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.585999 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="20147cbe-0709-4360-89e9-4c337ffb3dc2" containerName="registry-server" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.586984 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqcbh" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.598268 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-catalog-content\") pod \"community-operators-xqcbh\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") " pod="openshift-marketplace/community-operators-xqcbh" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.598396 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzsxm\" (UniqueName: \"kubernetes.io/projected/79920f1f-c6fe-4294-92e7-ebf5956af72a-kube-api-access-gzsxm\") pod \"community-operators-xqcbh\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") " pod="openshift-marketplace/community-operators-xqcbh" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.598424 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-utilities\") pod \"community-operators-xqcbh\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") " pod="openshift-marketplace/community-operators-xqcbh" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.611296 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xqcbh"] Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.698938 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzsxm\" (UniqueName: \"kubernetes.io/projected/79920f1f-c6fe-4294-92e7-ebf5956af72a-kube-api-access-gzsxm\") pod \"community-operators-xqcbh\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") " pod="openshift-marketplace/community-operators-xqcbh" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.698998 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-utilities\") pod \"community-operators-xqcbh\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") " pod="openshift-marketplace/community-operators-xqcbh" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.699066 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-catalog-content\") pod \"community-operators-xqcbh\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") " pod="openshift-marketplace/community-operators-xqcbh" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.699593 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-catalog-content\") pod \"community-operators-xqcbh\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") " pod="openshift-marketplace/community-operators-xqcbh" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.699757 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-utilities\") pod \"community-operators-xqcbh\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") " pod="openshift-marketplace/community-operators-xqcbh" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.729251 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzsxm\" (UniqueName: \"kubernetes.io/projected/79920f1f-c6fe-4294-92e7-ebf5956af72a-kube-api-access-gzsxm\") pod \"community-operators-xqcbh\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") " pod="openshift-marketplace/community-operators-xqcbh" Jan 04 12:55:22 crc kubenswrapper[5003]: I0104 12:55:22.908737 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqcbh" Jan 04 12:55:23 crc kubenswrapper[5003]: I0104 12:55:23.427372 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xqcbh"] Jan 04 12:55:24 crc kubenswrapper[5003]: I0104 12:55:24.369588 5003 generic.go:334] "Generic (PLEG): container finished" podID="79920f1f-c6fe-4294-92e7-ebf5956af72a" containerID="a6da3e529d9c9eb90ae0021b9bd6d84d6b06abec2db4a380b8e72140616f273e" exitCode=0 Jan 04 12:55:24 crc kubenswrapper[5003]: I0104 12:55:24.369654 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqcbh" event={"ID":"79920f1f-c6fe-4294-92e7-ebf5956af72a","Type":"ContainerDied","Data":"a6da3e529d9c9eb90ae0021b9bd6d84d6b06abec2db4a380b8e72140616f273e"} Jan 04 12:55:24 crc kubenswrapper[5003]: I0104 12:55:24.370370 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqcbh" event={"ID":"79920f1f-c6fe-4294-92e7-ebf5956af72a","Type":"ContainerStarted","Data":"e719edb5e858bbde8e683f316222a85ccfc406af80b3702b05e6f40c91405706"} Jan 04 12:55:25 crc kubenswrapper[5003]: I0104 12:55:25.383271 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqcbh" event={"ID":"79920f1f-c6fe-4294-92e7-ebf5956af72a","Type":"ContainerStarted","Data":"a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df"} Jan 04 12:55:26 crc kubenswrapper[5003]: I0104 12:55:26.394166 5003 generic.go:334] "Generic (PLEG): container finished" podID="79920f1f-c6fe-4294-92e7-ebf5956af72a" containerID="a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df" exitCode=0 Jan 04 12:55:26 crc kubenswrapper[5003]: I0104 12:55:26.394233 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqcbh" event={"ID":"79920f1f-c6fe-4294-92e7-ebf5956af72a","Type":"ContainerDied","Data":"a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df"} Jan 04 12:55:27 crc kubenswrapper[5003]: I0104 12:55:27.404027 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqcbh" event={"ID":"79920f1f-c6fe-4294-92e7-ebf5956af72a","Type":"ContainerStarted","Data":"dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736"} Jan 04 12:55:27 crc kubenswrapper[5003]: I0104 12:55:27.435476 
Jan 04 12:55:32 crc kubenswrapper[5003]: I0104 12:55:32.909258 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xqcbh"
Jan 04 12:55:32 crc kubenswrapper[5003]: I0104 12:55:32.910557 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xqcbh"
Jan 04 12:55:32 crc kubenswrapper[5003]: I0104 12:55:32.954356 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xqcbh"
Jan 04 12:55:33 crc kubenswrapper[5003]: I0104 12:55:33.494002 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xqcbh"
Jan 04 12:55:33 crc kubenswrapper[5003]: I0104 12:55:33.539691 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xqcbh"]
Jan 04 12:55:35 crc kubenswrapper[5003]: I0104 12:55:35.463571 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xqcbh" podUID="79920f1f-c6fe-4294-92e7-ebf5956af72a" containerName="registry-server" containerID="cri-o://dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736" gracePeriod=2
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.353802 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqcbh"
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.478928 5003 generic.go:334] "Generic (PLEG): container finished" podID="79920f1f-c6fe-4294-92e7-ebf5956af72a" containerID="dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736" exitCode=0
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.478972 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqcbh" event={"ID":"79920f1f-c6fe-4294-92e7-ebf5956af72a","Type":"ContainerDied","Data":"dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736"}
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.478997 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xqcbh" event={"ID":"79920f1f-c6fe-4294-92e7-ebf5956af72a","Type":"ContainerDied","Data":"e719edb5e858bbde8e683f316222a85ccfc406af80b3702b05e6f40c91405706"}
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.479037 5003 scope.go:117] "RemoveContainer" containerID="dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736"
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.479040 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xqcbh"
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.500536 5003 scope.go:117] "RemoveContainer" containerID="a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df"
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.517605 5003 scope.go:117] "RemoveContainer" containerID="a6da3e529d9c9eb90ae0021b9bd6d84d6b06abec2db4a380b8e72140616f273e"
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.542993 5003 scope.go:117] "RemoveContainer" containerID="dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736"
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.543688 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-catalog-content\") pod \"79920f1f-c6fe-4294-92e7-ebf5956af72a\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") "
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.543839 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzsxm\" (UniqueName: \"kubernetes.io/projected/79920f1f-c6fe-4294-92e7-ebf5956af72a-kube-api-access-gzsxm\") pod \"79920f1f-c6fe-4294-92e7-ebf5956af72a\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") "
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.543919 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-utilities\") pod \"79920f1f-c6fe-4294-92e7-ebf5956af72a\" (UID: \"79920f1f-c6fe-4294-92e7-ebf5956af72a\") "
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.545284 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-utilities" (OuterVolumeSpecName: "utilities") pod "79920f1f-c6fe-4294-92e7-ebf5956af72a" (UID: "79920f1f-c6fe-4294-92e7-ebf5956af72a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.550213 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79920f1f-c6fe-4294-92e7-ebf5956af72a-kube-api-access-gzsxm" (OuterVolumeSpecName: "kube-api-access-gzsxm") pod "79920f1f-c6fe-4294-92e7-ebf5956af72a" (UID: "79920f1f-c6fe-4294-92e7-ebf5956af72a"). InnerVolumeSpecName "kube-api-access-gzsxm". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:55:36 crc kubenswrapper[5003]: E0104 12:55:36.552278 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736\": container with ID starting with dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736 not found: ID does not exist" containerID="dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736" Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.552377 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736"} err="failed to get container status \"dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736\": rpc error: code = NotFound desc = could not find container \"dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736\": container with ID starting with dad55fa5e00ead7f13b3c73688b608ce03ad93985b886ec443e847da87bbd736 not found: ID does not exist" Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.552460 5003 scope.go:117] "RemoveContainer" containerID="a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df" Jan 04 12:55:36 crc kubenswrapper[5003]: E0104 12:55:36.553181 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df\": container with ID starting with a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df not found: ID does not exist" containerID="a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df" Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.553242 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df"} err="failed to get container status \"a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df\": rpc error: code = NotFound desc = could not find container \"a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df\": container with ID starting with a627271fb90812a01bf4df737e98b2e51c4998bd61962f45f6a2f914553bf9df not found: ID does not exist" Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.553310 5003 scope.go:117] "RemoveContainer" containerID="a6da3e529d9c9eb90ae0021b9bd6d84d6b06abec2db4a380b8e72140616f273e" Jan 04 12:55:36 crc kubenswrapper[5003]: E0104 12:55:36.553619 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6da3e529d9c9eb90ae0021b9bd6d84d6b06abec2db4a380b8e72140616f273e\": container with ID starting with a6da3e529d9c9eb90ae0021b9bd6d84d6b06abec2db4a380b8e72140616f273e not found: ID does not exist" containerID="a6da3e529d9c9eb90ae0021b9bd6d84d6b06abec2db4a380b8e72140616f273e" Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.553642 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6da3e529d9c9eb90ae0021b9bd6d84d6b06abec2db4a380b8e72140616f273e"} err="failed to get container status \"a6da3e529d9c9eb90ae0021b9bd6d84d6b06abec2db4a380b8e72140616f273e\": rpc error: code = NotFound desc = could not find container \"a6da3e529d9c9eb90ae0021b9bd6d84d6b06abec2db4a380b8e72140616f273e\": container with ID starting with 
a6da3e529d9c9eb90ae0021b9bd6d84d6b06abec2db4a380b8e72140616f273e not found: ID does not exist" Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.604826 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "79920f1f-c6fe-4294-92e7-ebf5956af72a" (UID: "79920f1f-c6fe-4294-92e7-ebf5956af72a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.646201 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.646234 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzsxm\" (UniqueName: \"kubernetes.io/projected/79920f1f-c6fe-4294-92e7-ebf5956af72a-kube-api-access-gzsxm\") on node \"crc\" DevicePath \"\"" Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.646248 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79920f1f-c6fe-4294-92e7-ebf5956af72a-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.817082 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xqcbh"] Jan 04 12:55:36 crc kubenswrapper[5003]: I0104 12:55:36.817131 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xqcbh"] Jan 04 12:55:38 crc kubenswrapper[5003]: I0104 12:55:38.814983 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79920f1f-c6fe-4294-92e7-ebf5956af72a" path="/var/lib/kubelet/pods/79920f1f-c6fe-4294-92e7-ebf5956af72a/volumes" Jan 04 12:55:39 crc kubenswrapper[5003]: I0104 12:55:39.418657 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:55:39 crc kubenswrapper[5003]: I0104 12:55:39.418756 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:56:09 crc kubenswrapper[5003]: I0104 12:56:09.419074 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:56:09 crc kubenswrapper[5003]: I0104 12:56:09.419980 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:56:39 crc kubenswrapper[5003]: I0104 12:56:39.419079 5003 patch_prober.go:28] interesting 
pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:56:39 crc kubenswrapper[5003]: I0104 12:56:39.419810 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:56:39 crc kubenswrapper[5003]: I0104 12:56:39.419893 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 12:56:39 crc kubenswrapper[5003]: I0104 12:56:39.420744 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ad47b5ec6c7f3f4fe940d5279c53af7f028a95680050dddd7aa4de6857330099"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:56:39 crc kubenswrapper[5003]: I0104 12:56:39.420815 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://ad47b5ec6c7f3f4fe940d5279c53af7f028a95680050dddd7aa4de6857330099" gracePeriod=600 Jan 04 12:56:40 crc kubenswrapper[5003]: I0104 12:56:40.045642 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="ad47b5ec6c7f3f4fe940d5279c53af7f028a95680050dddd7aa4de6857330099" exitCode=0 Jan 04 12:56:40 crc kubenswrapper[5003]: I0104 12:56:40.045763 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"ad47b5ec6c7f3f4fe940d5279c53af7f028a95680050dddd7aa4de6857330099"} Jan 04 12:56:40 crc kubenswrapper[5003]: I0104 12:56:40.046360 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a"} Jan 04 12:56:40 crc kubenswrapper[5003]: I0104 12:56:40.046402 5003 scope.go:117] "RemoveContainer" containerID="8d0857e0bb3abdaa409575389676fe5a44a7fd3ee14dc99a80c7ebfd877a8c32" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.415717 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zqtc6"] Jan 04 12:58:04 crc kubenswrapper[5003]: E0104 12:58:04.417449 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79920f1f-c6fe-4294-92e7-ebf5956af72a" containerName="extract-utilities" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.417478 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="79920f1f-c6fe-4294-92e7-ebf5956af72a" containerName="extract-utilities" Jan 04 12:58:04 crc kubenswrapper[5003]: E0104 12:58:04.417498 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79920f1f-c6fe-4294-92e7-ebf5956af72a" 
containerName="registry-server" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.417511 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="79920f1f-c6fe-4294-92e7-ebf5956af72a" containerName="registry-server" Jan 04 12:58:04 crc kubenswrapper[5003]: E0104 12:58:04.417554 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79920f1f-c6fe-4294-92e7-ebf5956af72a" containerName="extract-content" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.417567 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="79920f1f-c6fe-4294-92e7-ebf5956af72a" containerName="extract-content" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.417825 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="79920f1f-c6fe-4294-92e7-ebf5956af72a" containerName="registry-server" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.420796 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.425666 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zqtc6"] Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.581883 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-catalog-content\") pod \"redhat-operators-zqtc6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.582679 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lv7j2\" (UniqueName: \"kubernetes.io/projected/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-kube-api-access-lv7j2\") pod \"redhat-operators-zqtc6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.583085 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-utilities\") pod \"redhat-operators-zqtc6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.687354 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-catalog-content\") pod \"redhat-operators-zqtc6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.687484 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lv7j2\" (UniqueName: \"kubernetes.io/projected/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-kube-api-access-lv7j2\") pod \"redhat-operators-zqtc6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.687562 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-utilities\") pod \"redhat-operators-zqtc6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " 
pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.688266 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-catalog-content\") pod \"redhat-operators-zqtc6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.688576 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-utilities\") pod \"redhat-operators-zqtc6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.711344 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lv7j2\" (UniqueName: \"kubernetes.io/projected/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-kube-api-access-lv7j2\") pod \"redhat-operators-zqtc6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:04 crc kubenswrapper[5003]: I0104 12:58:04.749765 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:05 crc kubenswrapper[5003]: I0104 12:58:05.272513 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zqtc6"] Jan 04 12:58:05 crc kubenswrapper[5003]: I0104 12:58:05.871779 5003 generic.go:334] "Generic (PLEG): container finished" podID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerID="014ce433cecbe7eef2ccd58fea373f27309334dd304dafb59ac949b759195ab0" exitCode=0 Jan 04 12:58:05 crc kubenswrapper[5003]: I0104 12:58:05.871852 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zqtc6" event={"ID":"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6","Type":"ContainerDied","Data":"014ce433cecbe7eef2ccd58fea373f27309334dd304dafb59ac949b759195ab0"} Jan 04 12:58:05 crc kubenswrapper[5003]: I0104 12:58:05.871894 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zqtc6" event={"ID":"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6","Type":"ContainerStarted","Data":"d04cad5b56deb7267b0a789a32180b620df944d183e912b7d5a75bc4853fcf62"} Jan 04 12:58:06 crc kubenswrapper[5003]: I0104 12:58:06.883857 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zqtc6" event={"ID":"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6","Type":"ContainerStarted","Data":"40e5d284c51458d94f711faa91c6f181b4cc3e1d04310439f749b8292f179cc8"} Jan 04 12:58:07 crc kubenswrapper[5003]: I0104 12:58:07.904664 5003 generic.go:334] "Generic (PLEG): container finished" podID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerID="40e5d284c51458d94f711faa91c6f181b4cc3e1d04310439f749b8292f179cc8" exitCode=0 Jan 04 12:58:07 crc kubenswrapper[5003]: I0104 12:58:07.904792 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zqtc6" event={"ID":"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6","Type":"ContainerDied","Data":"40e5d284c51458d94f711faa91c6f181b4cc3e1d04310439f749b8292f179cc8"} Jan 04 12:58:08 crc kubenswrapper[5003]: I0104 12:58:08.920700 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zqtc6" 
event={"ID":"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6","Type":"ContainerStarted","Data":"12d6f601808d450d7db77cd223707a57bf395390a3025aa334b230de29c8aa2b"} Jan 04 12:58:08 crc kubenswrapper[5003]: I0104 12:58:08.952086 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zqtc6" podStartSLOduration=2.321278679 podStartE2EDuration="4.952062095s" podCreationTimestamp="2026-01-04 12:58:04 +0000 UTC" firstStartedPulling="2026-01-04 12:58:05.873559237 +0000 UTC m=+4201.346589078" lastFinishedPulling="2026-01-04 12:58:08.504342653 +0000 UTC m=+4203.977372494" observedRunningTime="2026-01-04 12:58:08.948407518 +0000 UTC m=+4204.421437379" watchObservedRunningTime="2026-01-04 12:58:08.952062095 +0000 UTC m=+4204.425091936" Jan 04 12:58:14 crc kubenswrapper[5003]: I0104 12:58:14.750571 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:14 crc kubenswrapper[5003]: I0104 12:58:14.751252 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:15 crc kubenswrapper[5003]: I0104 12:58:15.801053 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zqtc6" podUID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerName="registry-server" probeResult="failure" output=< Jan 04 12:58:15 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s Jan 04 12:58:15 crc kubenswrapper[5003]: > Jan 04 12:58:24 crc kubenswrapper[5003]: I0104 12:58:24.820142 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:24 crc kubenswrapper[5003]: I0104 12:58:24.887999 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:25 crc kubenswrapper[5003]: I0104 12:58:25.060907 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zqtc6"] Jan 04 12:58:26 crc kubenswrapper[5003]: I0104 12:58:26.078191 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zqtc6" podUID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerName="registry-server" containerID="cri-o://12d6f601808d450d7db77cd223707a57bf395390a3025aa334b230de29c8aa2b" gracePeriod=2 Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.096149 5003 generic.go:334] "Generic (PLEG): container finished" podID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerID="12d6f601808d450d7db77cd223707a57bf395390a3025aa334b230de29c8aa2b" exitCode=0 Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.096436 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zqtc6" event={"ID":"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6","Type":"ContainerDied","Data":"12d6f601808d450d7db77cd223707a57bf395390a3025aa334b230de29c8aa2b"} Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.282431 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.465173 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-utilities\") pod \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.465355 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lv7j2\" (UniqueName: \"kubernetes.io/projected/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-kube-api-access-lv7j2\") pod \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.465470 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-catalog-content\") pod \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\" (UID: \"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6\") " Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.466089 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-utilities" (OuterVolumeSpecName: "utilities") pod "ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" (UID: "ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.486261 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-kube-api-access-lv7j2" (OuterVolumeSpecName: "kube-api-access-lv7j2") pod "ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" (UID: "ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6"). InnerVolumeSpecName "kube-api-access-lv7j2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.567407 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.567673 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lv7j2\" (UniqueName: \"kubernetes.io/projected/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-kube-api-access-lv7j2\") on node \"crc\" DevicePath \"\"" Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.609814 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" (UID: "ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 12:58:28 crc kubenswrapper[5003]: I0104 12:58:28.669550 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 12:58:29 crc kubenswrapper[5003]: I0104 12:58:29.107146 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zqtc6" event={"ID":"ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6","Type":"ContainerDied","Data":"d04cad5b56deb7267b0a789a32180b620df944d183e912b7d5a75bc4853fcf62"} Jan 04 12:58:29 crc kubenswrapper[5003]: I0104 12:58:29.107226 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zqtc6" Jan 04 12:58:29 crc kubenswrapper[5003]: I0104 12:58:29.107533 5003 scope.go:117] "RemoveContainer" containerID="12d6f601808d450d7db77cd223707a57bf395390a3025aa334b230de29c8aa2b" Jan 04 12:58:29 crc kubenswrapper[5003]: I0104 12:58:29.130275 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zqtc6"] Jan 04 12:58:29 crc kubenswrapper[5003]: I0104 12:58:29.133243 5003 scope.go:117] "RemoveContainer" containerID="40e5d284c51458d94f711faa91c6f181b4cc3e1d04310439f749b8292f179cc8" Jan 04 12:58:29 crc kubenswrapper[5003]: I0104 12:58:29.135185 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zqtc6"] Jan 04 12:58:29 crc kubenswrapper[5003]: I0104 12:58:29.153907 5003 scope.go:117] "RemoveContainer" containerID="014ce433cecbe7eef2ccd58fea373f27309334dd304dafb59ac949b759195ab0" Jan 04 12:58:30 crc kubenswrapper[5003]: I0104 12:58:30.821431 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" path="/var/lib/kubelet/pods/ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6/volumes" Jan 04 12:58:39 crc kubenswrapper[5003]: I0104 12:58:39.419274 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:58:39 crc kubenswrapper[5003]: I0104 12:58:39.419864 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:59:09 crc kubenswrapper[5003]: I0104 12:59:09.419732 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:59:09 crc kubenswrapper[5003]: I0104 12:59:09.421515 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:59:39 crc kubenswrapper[5003]: I0104 12:59:39.418937 5003 patch_prober.go:28] 
interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 12:59:39 crc kubenswrapper[5003]: I0104 12:59:39.419780 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 12:59:39 crc kubenswrapper[5003]: I0104 12:59:39.419862 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 12:59:39 crc kubenswrapper[5003]: I0104 12:59:39.420900 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 12:59:39 crc kubenswrapper[5003]: I0104 12:59:39.421085 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" gracePeriod=600 Jan 04 12:59:39 crc kubenswrapper[5003]: E0104 12:59:39.549232 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:59:39 crc kubenswrapper[5003]: I0104 12:59:39.843860 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" exitCode=0 Jan 04 12:59:39 crc kubenswrapper[5003]: I0104 12:59:39.843962 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a"} Jan 04 12:59:39 crc kubenswrapper[5003]: I0104 12:59:39.844439 5003 scope.go:117] "RemoveContainer" containerID="ad47b5ec6c7f3f4fe940d5279c53af7f028a95680050dddd7aa4de6857330099" Jan 04 12:59:39 crc kubenswrapper[5003]: I0104 12:59:39.845367 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 12:59:39 crc kubenswrapper[5003]: E0104 12:59:39.845815 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 12:59:53 crc kubenswrapper[5003]: I0104 12:59:53.807839 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 12:59:53 crc kubenswrapper[5003]: E0104 12:59:53.809186 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.194918 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn"] Jan 04 13:00:00 crc kubenswrapper[5003]: E0104 13:00:00.196479 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerName="registry-server" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.196500 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerName="registry-server" Jan 04 13:00:00 crc kubenswrapper[5003]: E0104 13:00:00.196531 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerName="extract-utilities" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.196537 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerName="extract-utilities" Jan 04 13:00:00 crc kubenswrapper[5003]: E0104 13:00:00.196554 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerName="extract-content" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.196562 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerName="extract-content" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.196716 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed9dcefb-b45b-4cc0-8826-a8edb7fdeba6" containerName="registry-server" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.197506 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.200716 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.201324 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.211084 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn"] Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.295939 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/970f9201-2190-472f-918c-41f91325175e-secret-volume\") pod \"collect-profiles-29458860-5mbmn\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.296272 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/970f9201-2190-472f-918c-41f91325175e-config-volume\") pod \"collect-profiles-29458860-5mbmn\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.296504 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddqbf\" (UniqueName: \"kubernetes.io/projected/970f9201-2190-472f-918c-41f91325175e-kube-api-access-ddqbf\") pod \"collect-profiles-29458860-5mbmn\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.397878 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/970f9201-2190-472f-918c-41f91325175e-secret-volume\") pod \"collect-profiles-29458860-5mbmn\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.397987 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/970f9201-2190-472f-918c-41f91325175e-config-volume\") pod \"collect-profiles-29458860-5mbmn\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.398098 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddqbf\" (UniqueName: \"kubernetes.io/projected/970f9201-2190-472f-918c-41f91325175e-kube-api-access-ddqbf\") pod \"collect-profiles-29458860-5mbmn\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.399246 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/970f9201-2190-472f-918c-41f91325175e-config-volume\") pod 
\"collect-profiles-29458860-5mbmn\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.405266 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/970f9201-2190-472f-918c-41f91325175e-secret-volume\") pod \"collect-profiles-29458860-5mbmn\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.418979 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddqbf\" (UniqueName: \"kubernetes.io/projected/970f9201-2190-472f-918c-41f91325175e-kube-api-access-ddqbf\") pod \"collect-profiles-29458860-5mbmn\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.524091 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:00 crc kubenswrapper[5003]: I0104 13:00:00.983255 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn"] Jan 04 13:00:01 crc kubenswrapper[5003]: I0104 13:00:01.055871 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" event={"ID":"970f9201-2190-472f-918c-41f91325175e","Type":"ContainerStarted","Data":"222a8cfceb6729eec118846afa7ed986668ce4d006af108cd3bd30dbb4a5ae00"} Jan 04 13:00:02 crc kubenswrapper[5003]: I0104 13:00:02.064535 5003 generic.go:334] "Generic (PLEG): container finished" podID="970f9201-2190-472f-918c-41f91325175e" containerID="e9c01ff710a1af451c3a18e13574da290e6f27346b3111f98d183606f31c9bb9" exitCode=0 Jan 04 13:00:02 crc kubenswrapper[5003]: I0104 13:00:02.064582 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" event={"ID":"970f9201-2190-472f-918c-41f91325175e","Type":"ContainerDied","Data":"e9c01ff710a1af451c3a18e13574da290e6f27346b3111f98d183606f31c9bb9"} Jan 04 13:00:03 crc kubenswrapper[5003]: I0104 13:00:03.360232 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:03 crc kubenswrapper[5003]: I0104 13:00:03.543489 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/970f9201-2190-472f-918c-41f91325175e-config-volume\") pod \"970f9201-2190-472f-918c-41f91325175e\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " Jan 04 13:00:03 crc kubenswrapper[5003]: I0104 13:00:03.543582 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/970f9201-2190-472f-918c-41f91325175e-secret-volume\") pod \"970f9201-2190-472f-918c-41f91325175e\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " Jan 04 13:00:03 crc kubenswrapper[5003]: I0104 13:00:03.543655 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddqbf\" (UniqueName: \"kubernetes.io/projected/970f9201-2190-472f-918c-41f91325175e-kube-api-access-ddqbf\") pod \"970f9201-2190-472f-918c-41f91325175e\" (UID: \"970f9201-2190-472f-918c-41f91325175e\") " Jan 04 13:00:03 crc kubenswrapper[5003]: I0104 13:00:03.545192 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/970f9201-2190-472f-918c-41f91325175e-config-volume" (OuterVolumeSpecName: "config-volume") pod "970f9201-2190-472f-918c-41f91325175e" (UID: "970f9201-2190-472f-918c-41f91325175e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:00:03 crc kubenswrapper[5003]: I0104 13:00:03.550547 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/970f9201-2190-472f-918c-41f91325175e-kube-api-access-ddqbf" (OuterVolumeSpecName: "kube-api-access-ddqbf") pod "970f9201-2190-472f-918c-41f91325175e" (UID: "970f9201-2190-472f-918c-41f91325175e"). InnerVolumeSpecName "kube-api-access-ddqbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:00:03 crc kubenswrapper[5003]: I0104 13:00:03.551294 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/970f9201-2190-472f-918c-41f91325175e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "970f9201-2190-472f-918c-41f91325175e" (UID: "970f9201-2190-472f-918c-41f91325175e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:00:03 crc kubenswrapper[5003]: I0104 13:00:03.645745 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddqbf\" (UniqueName: \"kubernetes.io/projected/970f9201-2190-472f-918c-41f91325175e-kube-api-access-ddqbf\") on node \"crc\" DevicePath \"\"" Jan 04 13:00:03 crc kubenswrapper[5003]: I0104 13:00:03.645784 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/970f9201-2190-472f-918c-41f91325175e-config-volume\") on node \"crc\" DevicePath \"\"" Jan 04 13:00:03 crc kubenswrapper[5003]: I0104 13:00:03.645796 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/970f9201-2190-472f-918c-41f91325175e-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 04 13:00:04 crc kubenswrapper[5003]: I0104 13:00:04.081884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" event={"ID":"970f9201-2190-472f-918c-41f91325175e","Type":"ContainerDied","Data":"222a8cfceb6729eec118846afa7ed986668ce4d006af108cd3bd30dbb4a5ae00"} Jan 04 13:00:04 crc kubenswrapper[5003]: I0104 13:00:04.081930 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="222a8cfceb6729eec118846afa7ed986668ce4d006af108cd3bd30dbb4a5ae00" Jan 04 13:00:04 crc kubenswrapper[5003]: I0104 13:00:04.082348 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458860-5mbmn" Jan 04 13:00:04 crc kubenswrapper[5003]: I0104 13:00:04.430209 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"] Jan 04 13:00:04 crc kubenswrapper[5003]: I0104 13:00:04.436725 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458815-74tvc"] Jan 04 13:00:04 crc kubenswrapper[5003]: I0104 13:00:04.817892 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72ac7f8a-3e0b-4784-98c7-30a6a7306a77" path="/var/lib/kubelet/pods/72ac7f8a-3e0b-4784-98c7-30a6a7306a77/volumes" Jan 04 13:00:08 crc kubenswrapper[5003]: I0104 13:00:08.807322 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:00:08 crc kubenswrapper[5003]: E0104 13:00:08.808182 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:00:13 crc kubenswrapper[5003]: I0104 13:00:13.908911 5003 scope.go:117] "RemoveContainer" containerID="e061e1ada410b974cc6e9261e36d1193b4ede8b9844a0fe82b4c5711247be3b1" Jan 04 13:00:19 crc kubenswrapper[5003]: I0104 13:00:19.807061 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:00:19 crc kubenswrapper[5003]: E0104 13:00:19.808060 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:00:32 crc kubenswrapper[5003]: I0104 13:00:32.806625 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:00:32 crc kubenswrapper[5003]: E0104 13:00:32.807267 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:00:47 crc kubenswrapper[5003]: I0104 13:00:47.808228 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:00:47 crc kubenswrapper[5003]: E0104 13:00:47.811110 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:00:59 crc kubenswrapper[5003]: I0104 13:00:59.807625 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:00:59 crc kubenswrapper[5003]: E0104 13:00:59.808562 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:01:11 crc kubenswrapper[5003]: I0104 13:01:11.806183 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:01:11 crc kubenswrapper[5003]: E0104 13:01:11.806946 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:01:23 crc kubenswrapper[5003]: I0104 13:01:23.807078 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:01:23 crc kubenswrapper[5003]: E0104 13:01:23.808223 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:01:36 crc kubenswrapper[5003]: I0104 13:01:36.807237 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:01:36 crc kubenswrapper[5003]: E0104 13:01:36.808070 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:01:47 crc kubenswrapper[5003]: I0104 13:01:47.807907 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:01:47 crc kubenswrapper[5003]: E0104 13:01:47.809268 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:01:59 crc kubenswrapper[5003]: I0104 13:01:59.806852 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:01:59 crc kubenswrapper[5003]: E0104 13:01:59.807468 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:02:13 crc kubenswrapper[5003]: I0104 13:02:13.807739 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:02:13 crc kubenswrapper[5003]: E0104 13:02:13.809352 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:02:28 crc kubenswrapper[5003]: I0104 13:02:28.807126 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:02:28 crc kubenswrapper[5003]: E0104 13:02:28.808281 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:02:42 crc kubenswrapper[5003]: I0104 13:02:42.808555 5003 
scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:02:42 crc kubenswrapper[5003]: E0104 13:02:42.809799 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:02:54 crc kubenswrapper[5003]: I0104 13:02:54.815080 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:02:54 crc kubenswrapper[5003]: E0104 13:02:54.816247 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:03:08 crc kubenswrapper[5003]: I0104 13:03:08.806766 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:03:08 crc kubenswrapper[5003]: E0104 13:03:08.807974 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.531418 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qn5xm"] Jan 04 13:03:23 crc kubenswrapper[5003]: E0104 13:03:23.533864 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="970f9201-2190-472f-918c-41f91325175e" containerName="collect-profiles" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.534061 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="970f9201-2190-472f-918c-41f91325175e" containerName="collect-profiles" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.534583 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="970f9201-2190-472f-918c-41f91325175e" containerName="collect-profiles" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.540768 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.555833 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qn5xm"] Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.579814 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-catalog-content\") pod \"certified-operators-qn5xm\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.580232 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-utilities\") pod \"certified-operators-qn5xm\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.580418 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjqdv\" (UniqueName: \"kubernetes.io/projected/4ad26058-e8ab-4887-9050-79bc3c30c5b4-kube-api-access-sjqdv\") pod \"certified-operators-qn5xm\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.683055 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjqdv\" (UniqueName: \"kubernetes.io/projected/4ad26058-e8ab-4887-9050-79bc3c30c5b4-kube-api-access-sjqdv\") pod \"certified-operators-qn5xm\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.683247 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-catalog-content\") pod \"certified-operators-qn5xm\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.683341 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-utilities\") pod \"certified-operators-qn5xm\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.683779 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-catalog-content\") pod \"certified-operators-qn5xm\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.684371 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-utilities\") pod \"certified-operators-qn5xm\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.708976 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sjqdv\" (UniqueName: \"kubernetes.io/projected/4ad26058-e8ab-4887-9050-79bc3c30c5b4-kube-api-access-sjqdv\") pod \"certified-operators-qn5xm\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.807441 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:03:23 crc kubenswrapper[5003]: E0104 13:03:23.807857 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:03:23 crc kubenswrapper[5003]: I0104 13:03:23.872282 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:24 crc kubenswrapper[5003]: I0104 13:03:24.435036 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qn5xm"] Jan 04 13:03:24 crc kubenswrapper[5003]: I0104 13:03:24.907421 5003 generic.go:334] "Generic (PLEG): container finished" podID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerID="875ca34d489f70b04a59ba2d72f257e58f51b3b1bd3d0191c2a312813f0134cb" exitCode=0 Jan 04 13:03:24 crc kubenswrapper[5003]: I0104 13:03:24.908091 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn5xm" event={"ID":"4ad26058-e8ab-4887-9050-79bc3c30c5b4","Type":"ContainerDied","Data":"875ca34d489f70b04a59ba2d72f257e58f51b3b1bd3d0191c2a312813f0134cb"} Jan 04 13:03:24 crc kubenswrapper[5003]: I0104 13:03:24.908132 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn5xm" event={"ID":"4ad26058-e8ab-4887-9050-79bc3c30c5b4","Type":"ContainerStarted","Data":"4dc5aa6cbb2b1b18be92af1fc6c06a7f639a5f2d8696a0ef3c49eb73c9d92210"} Jan 04 13:03:24 crc kubenswrapper[5003]: I0104 13:03:24.910342 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 13:03:30 crc kubenswrapper[5003]: I0104 13:03:30.960175 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn5xm" event={"ID":"4ad26058-e8ab-4887-9050-79bc3c30c5b4","Type":"ContainerStarted","Data":"71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44"} Jan 04 13:03:31 crc kubenswrapper[5003]: I0104 13:03:31.976278 5003 generic.go:334] "Generic (PLEG): container finished" podID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerID="71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44" exitCode=0 Jan 04 13:03:31 crc kubenswrapper[5003]: I0104 13:03:31.977178 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn5xm" event={"ID":"4ad26058-e8ab-4887-9050-79bc3c30c5b4","Type":"ContainerDied","Data":"71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44"} Jan 04 13:03:32 crc kubenswrapper[5003]: I0104 13:03:32.986090 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn5xm" 
event={"ID":"4ad26058-e8ab-4887-9050-79bc3c30c5b4","Type":"ContainerStarted","Data":"11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08"} Jan 04 13:03:33 crc kubenswrapper[5003]: I0104 13:03:33.005702 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qn5xm" podStartSLOduration=2.467229083 podStartE2EDuration="10.00568192s" podCreationTimestamp="2026-01-04 13:03:23 +0000 UTC" firstStartedPulling="2026-01-04 13:03:24.910092358 +0000 UTC m=+4520.383122199" lastFinishedPulling="2026-01-04 13:03:32.448545195 +0000 UTC m=+4527.921575036" observedRunningTime="2026-01-04 13:03:33.002915017 +0000 UTC m=+4528.475944868" watchObservedRunningTime="2026-01-04 13:03:33.00568192 +0000 UTC m=+4528.478711761" Jan 04 13:03:33 crc kubenswrapper[5003]: I0104 13:03:33.872744 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:33 crc kubenswrapper[5003]: I0104 13:03:33.872797 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:34 crc kubenswrapper[5003]: I0104 13:03:34.817207 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:03:34 crc kubenswrapper[5003]: E0104 13:03:34.817618 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:03:34 crc kubenswrapper[5003]: I0104 13:03:34.945069 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-qn5xm" podUID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerName="registry-server" probeResult="failure" output=< Jan 04 13:03:34 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s Jan 04 13:03:34 crc kubenswrapper[5003]: > Jan 04 13:03:43 crc kubenswrapper[5003]: I0104 13:03:43.919854 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:43 crc kubenswrapper[5003]: I0104 13:03:43.972435 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:44 crc kubenswrapper[5003]: I0104 13:03:44.156604 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qn5xm"] Jan 04 13:03:45 crc kubenswrapper[5003]: I0104 13:03:45.111124 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qn5xm" podUID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerName="registry-server" containerID="cri-o://11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08" gracePeriod=2 Jan 04 13:03:45 crc kubenswrapper[5003]: I0104 13:03:45.951183 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.091146 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjqdv\" (UniqueName: \"kubernetes.io/projected/4ad26058-e8ab-4887-9050-79bc3c30c5b4-kube-api-access-sjqdv\") pod \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.091377 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-utilities\") pod \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.091413 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-catalog-content\") pod \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\" (UID: \"4ad26058-e8ab-4887-9050-79bc3c30c5b4\") " Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.092427 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-utilities" (OuterVolumeSpecName: "utilities") pod "4ad26058-e8ab-4887-9050-79bc3c30c5b4" (UID: "4ad26058-e8ab-4887-9050-79bc3c30c5b4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.100206 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ad26058-e8ab-4887-9050-79bc3c30c5b4-kube-api-access-sjqdv" (OuterVolumeSpecName: "kube-api-access-sjqdv") pod "4ad26058-e8ab-4887-9050-79bc3c30c5b4" (UID: "4ad26058-e8ab-4887-9050-79bc3c30c5b4"). InnerVolumeSpecName "kube-api-access-sjqdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.125052 5003 generic.go:334] "Generic (PLEG): container finished" podID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerID="11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08" exitCode=0 Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.125104 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn5xm" event={"ID":"4ad26058-e8ab-4887-9050-79bc3c30c5b4","Type":"ContainerDied","Data":"11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08"} Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.125137 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qn5xm" event={"ID":"4ad26058-e8ab-4887-9050-79bc3c30c5b4","Type":"ContainerDied","Data":"4dc5aa6cbb2b1b18be92af1fc6c06a7f639a5f2d8696a0ef3c49eb73c9d92210"} Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.125162 5003 scope.go:117] "RemoveContainer" containerID="11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.125880 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qn5xm" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.149894 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4ad26058-e8ab-4887-9050-79bc3c30c5b4" (UID: "4ad26058-e8ab-4887-9050-79bc3c30c5b4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.161807 5003 scope.go:117] "RemoveContainer" containerID="71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.198959 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.199172 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ad26058-e8ab-4887-9050-79bc3c30c5b4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.199253 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjqdv\" (UniqueName: \"kubernetes.io/projected/4ad26058-e8ab-4887-9050-79bc3c30c5b4-kube-api-access-sjqdv\") on node \"crc\" DevicePath \"\"" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.201906 5003 scope.go:117] "RemoveContainer" containerID="875ca34d489f70b04a59ba2d72f257e58f51b3b1bd3d0191c2a312813f0134cb" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.236189 5003 scope.go:117] "RemoveContainer" containerID="11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08" Jan 04 13:03:46 crc kubenswrapper[5003]: E0104 13:03:46.237524 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08\": container with ID starting with 11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08 not found: ID does not exist" containerID="11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.237728 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08"} err="failed to get container status \"11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08\": rpc error: code = NotFound desc = could not find container \"11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08\": container with ID starting with 11528213ceba46387b14613c08137c2ce13e0b2f5ec7c1979a9cd9b2cbf30c08 not found: ID does not exist" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.237864 5003 scope.go:117] "RemoveContainer" containerID="71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44" Jan 04 13:03:46 crc kubenswrapper[5003]: E0104 13:03:46.238350 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44\": container with ID starting with 71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44 not found: ID does not exist" 
containerID="71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.238495 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44"} err="failed to get container status \"71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44\": rpc error: code = NotFound desc = could not find container \"71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44\": container with ID starting with 71aac3bfe33f76881d59fc91d32fe848252efe11c074298e9ecf1ff0e27f7d44 not found: ID does not exist" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.238603 5003 scope.go:117] "RemoveContainer" containerID="875ca34d489f70b04a59ba2d72f257e58f51b3b1bd3d0191c2a312813f0134cb" Jan 04 13:03:46 crc kubenswrapper[5003]: E0104 13:03:46.239004 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"875ca34d489f70b04a59ba2d72f257e58f51b3b1bd3d0191c2a312813f0134cb\": container with ID starting with 875ca34d489f70b04a59ba2d72f257e58f51b3b1bd3d0191c2a312813f0134cb not found: ID does not exist" containerID="875ca34d489f70b04a59ba2d72f257e58f51b3b1bd3d0191c2a312813f0134cb" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.239089 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"875ca34d489f70b04a59ba2d72f257e58f51b3b1bd3d0191c2a312813f0134cb"} err="failed to get container status \"875ca34d489f70b04a59ba2d72f257e58f51b3b1bd3d0191c2a312813f0134cb\": rpc error: code = NotFound desc = could not find container \"875ca34d489f70b04a59ba2d72f257e58f51b3b1bd3d0191c2a312813f0134cb\": container with ID starting with 875ca34d489f70b04a59ba2d72f257e58f51b3b1bd3d0191c2a312813f0134cb not found: ID does not exist" Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.461542 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qn5xm"] Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.467929 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qn5xm"] Jan 04 13:03:46 crc kubenswrapper[5003]: I0104 13:03:46.838131 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" path="/var/lib/kubelet/pods/4ad26058-e8ab-4887-9050-79bc3c30c5b4/volumes" Jan 04 13:03:48 crc kubenswrapper[5003]: I0104 13:03:48.807734 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:03:48 crc kubenswrapper[5003]: E0104 13:03:48.808537 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:04:01 crc kubenswrapper[5003]: I0104 13:04:01.808329 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:04:01 crc kubenswrapper[5003]: E0104 13:04:01.809790 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:04:16 crc kubenswrapper[5003]: I0104 13:04:16.806395 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:04:16 crc kubenswrapper[5003]: E0104 13:04:16.808167 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:04:30 crc kubenswrapper[5003]: I0104 13:04:30.810711 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:04:30 crc kubenswrapper[5003]: E0104 13:04:30.812244 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:04:45 crc kubenswrapper[5003]: I0104 13:04:45.806784 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:04:46 crc kubenswrapper[5003]: I0104 13:04:46.682824 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"d975fa17306542a8366b2b2ca8fac560784ac02e83ac35bd1f2e4557aff34a56"} Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.688836 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k7ntx"] Jan 04 13:04:50 crc kubenswrapper[5003]: E0104 13:04:50.691613 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerName="extract-utilities" Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.692007 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerName="extract-utilities" Jan 04 13:04:50 crc kubenswrapper[5003]: E0104 13:04:50.692056 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerName="extract-content" Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.692071 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerName="extract-content" Jan 04 13:04:50 crc kubenswrapper[5003]: E0104 13:04:50.692091 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerName="registry-server" Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.692103 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerName="registry-server" Jan 04 13:04:50 crc 
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.692379 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ad26058-e8ab-4887-9050-79bc3c30c5b4" containerName="registry-server"
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.694296 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.703523 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7ntx"]
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.779749 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-catalog-content\") pod \"redhat-marketplace-k7ntx\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") " pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.780749 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-utilities\") pod \"redhat-marketplace-k7ntx\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") " pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.780862 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfws4\" (UniqueName: \"kubernetes.io/projected/6642b732-1b9e-471f-9591-72c7f1d3e97c-kube-api-access-pfws4\") pod \"redhat-marketplace-k7ntx\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") " pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.882326 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-utilities\") pod \"redhat-marketplace-k7ntx\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") " pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.882463 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfws4\" (UniqueName: \"kubernetes.io/projected/6642b732-1b9e-471f-9591-72c7f1d3e97c-kube-api-access-pfws4\") pod \"redhat-marketplace-k7ntx\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") " pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.882522 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-catalog-content\") pod \"redhat-marketplace-k7ntx\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") " pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.883071 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-utilities\") pod \"redhat-marketplace-k7ntx\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") " pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.883089 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-catalog-content\") pod \"redhat-marketplace-k7ntx\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") " pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:04:50 crc kubenswrapper[5003]: I0104 13:04:50.907171 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfws4\" (UniqueName: \"kubernetes.io/projected/6642b732-1b9e-471f-9591-72c7f1d3e97c-kube-api-access-pfws4\") pod \"redhat-marketplace-k7ntx\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") " pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:04:51 crc kubenswrapper[5003]: I0104 13:04:51.030152 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:04:51 crc kubenswrapper[5003]: I0104 13:04:51.473952 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7ntx"]
Jan 04 13:04:51 crc kubenswrapper[5003]: I0104 13:04:51.732184 5003 generic.go:334] "Generic (PLEG): container finished" podID="6642b732-1b9e-471f-9591-72c7f1d3e97c" containerID="b2653471d3f236d9f7756cea9904903c7608d0e833b5912434a521332255ad78" exitCode=0
Jan 04 13:04:51 crc kubenswrapper[5003]: I0104 13:04:51.732260 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7ntx" event={"ID":"6642b732-1b9e-471f-9591-72c7f1d3e97c","Type":"ContainerDied","Data":"b2653471d3f236d9f7756cea9904903c7608d0e833b5912434a521332255ad78"}
Jan 04 13:04:51 crc kubenswrapper[5003]: I0104 13:04:51.732559 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7ntx" event={"ID":"6642b732-1b9e-471f-9591-72c7f1d3e97c","Type":"ContainerStarted","Data":"6b90cbd02357bdbe81bb8ae97b9f7699864cfa69e25894e9edbf19b087c7bf43"}
Jan 04 13:04:52 crc kubenswrapper[5003]: I0104 13:04:52.746401 5003 generic.go:334] "Generic (PLEG): container finished" podID="6642b732-1b9e-471f-9591-72c7f1d3e97c" containerID="117c35e3d0ee7d287426d3c9ef03c916935790ceb6e142e1a9244d12dba00f75" exitCode=0
Jan 04 13:04:52 crc kubenswrapper[5003]: I0104 13:04:52.746507 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7ntx" event={"ID":"6642b732-1b9e-471f-9591-72c7f1d3e97c","Type":"ContainerDied","Data":"117c35e3d0ee7d287426d3c9ef03c916935790ceb6e142e1a9244d12dba00f75"}
Jan 04 13:04:53 crc kubenswrapper[5003]: I0104 13:04:53.757068 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7ntx" event={"ID":"6642b732-1b9e-471f-9591-72c7f1d3e97c","Type":"ContainerStarted","Data":"059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e"}
Jan 04 13:05:01 crc kubenswrapper[5003]: I0104 13:05:01.031128 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:05:01 crc kubenswrapper[5003]: I0104 13:05:01.032049 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:05:01 crc kubenswrapper[5003]: I0104 13:05:01.087261 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:05:01 crc kubenswrapper[5003]: I0104 13:05:01.124820 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k7ntx" podStartSLOduration=9.632174349 podStartE2EDuration="11.124789671s" podCreationTimestamp="2026-01-04 13:04:50 +0000 UTC" firstStartedPulling="2026-01-04 13:04:51.734821251 +0000 UTC m=+4607.207851122" lastFinishedPulling="2026-01-04 13:04:53.227436593 +0000 UTC m=+4608.700466444" observedRunningTime="2026-01-04 13:04:53.795511879 +0000 UTC m=+4609.268541740" watchObservedRunningTime="2026-01-04 13:05:01.124789671 +0000 UTC m=+4616.597819522"
Jan 04 13:05:01 crc kubenswrapper[5003]: I0104 13:05:01.899200 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:05:01 crc kubenswrapper[5003]: I0104 13:05:01.969984 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7ntx"]
Jan 04 13:05:03 crc kubenswrapper[5003]: I0104 13:05:03.850491 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k7ntx" podUID="6642b732-1b9e-471f-9591-72c7f1d3e97c" containerName="registry-server" containerID="cri-o://059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e" gracePeriod=2
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.819839 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.863477 5003 generic.go:334] "Generic (PLEG): container finished" podID="6642b732-1b9e-471f-9591-72c7f1d3e97c" containerID="059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e" exitCode=0
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.863536 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7ntx" event={"ID":"6642b732-1b9e-471f-9591-72c7f1d3e97c","Type":"ContainerDied","Data":"059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e"}
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.863551 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k7ntx"
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.863569 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k7ntx" event={"ID":"6642b732-1b9e-471f-9591-72c7f1d3e97c","Type":"ContainerDied","Data":"6b90cbd02357bdbe81bb8ae97b9f7699864cfa69e25894e9edbf19b087c7bf43"}
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.863591 5003 scope.go:117] "RemoveContainer" containerID="059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e"
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.891024 5003 scope.go:117] "RemoveContainer" containerID="117c35e3d0ee7d287426d3c9ef03c916935790ceb6e142e1a9244d12dba00f75"
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.914335 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-utilities\") pod \"6642b732-1b9e-471f-9591-72c7f1d3e97c\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") "
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.914505 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-catalog-content\") pod \"6642b732-1b9e-471f-9591-72c7f1d3e97c\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") "
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.914568 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfws4\" (UniqueName: \"kubernetes.io/projected/6642b732-1b9e-471f-9591-72c7f1d3e97c-kube-api-access-pfws4\") pod \"6642b732-1b9e-471f-9591-72c7f1d3e97c\" (UID: \"6642b732-1b9e-471f-9591-72c7f1d3e97c\") "
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.915394 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-utilities" (OuterVolumeSpecName: "utilities") pod "6642b732-1b9e-471f-9591-72c7f1d3e97c" (UID: "6642b732-1b9e-471f-9591-72c7f1d3e97c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.917614 5003 scope.go:117] "RemoveContainer" containerID="b2653471d3f236d9f7756cea9904903c7608d0e833b5912434a521332255ad78"
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.923359 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6642b732-1b9e-471f-9591-72c7f1d3e97c-kube-api-access-pfws4" (OuterVolumeSpecName: "kube-api-access-pfws4") pod "6642b732-1b9e-471f-9591-72c7f1d3e97c" (UID: "6642b732-1b9e-471f-9591-72c7f1d3e97c"). InnerVolumeSpecName "kube-api-access-pfws4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.942513 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6642b732-1b9e-471f-9591-72c7f1d3e97c" (UID: "6642b732-1b9e-471f-9591-72c7f1d3e97c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.965362 5003 scope.go:117] "RemoveContainer" containerID="059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e"
Jan 04 13:05:04 crc kubenswrapper[5003]: E0104 13:05:04.966048 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e\": container with ID starting with 059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e not found: ID does not exist" containerID="059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e"
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.966092 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e"} err="failed to get container status \"059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e\": rpc error: code = NotFound desc = could not find container \"059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e\": container with ID starting with 059cb8e4d08a9856af4d3c2aa4e4fcb5f7ba54c8f73a99d498373ee097fae20e not found: ID does not exist"
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.966121 5003 scope.go:117] "RemoveContainer" containerID="117c35e3d0ee7d287426d3c9ef03c916935790ceb6e142e1a9244d12dba00f75"
Jan 04 13:05:04 crc kubenswrapper[5003]: E0104 13:05:04.966663 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"117c35e3d0ee7d287426d3c9ef03c916935790ceb6e142e1a9244d12dba00f75\": container with ID starting with 117c35e3d0ee7d287426d3c9ef03c916935790ceb6e142e1a9244d12dba00f75 not found: ID does not exist" containerID="117c35e3d0ee7d287426d3c9ef03c916935790ceb6e142e1a9244d12dba00f75"
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.966716 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"117c35e3d0ee7d287426d3c9ef03c916935790ceb6e142e1a9244d12dba00f75"} err="failed to get container status \"117c35e3d0ee7d287426d3c9ef03c916935790ceb6e142e1a9244d12dba00f75\": rpc error: code = NotFound desc = could not find container \"117c35e3d0ee7d287426d3c9ef03c916935790ceb6e142e1a9244d12dba00f75\": container with ID starting with 117c35e3d0ee7d287426d3c9ef03c916935790ceb6e142e1a9244d12dba00f75 not found: ID does not exist"
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.966749 5003 scope.go:117] "RemoveContainer" containerID="b2653471d3f236d9f7756cea9904903c7608d0e833b5912434a521332255ad78"
Jan 04 13:05:04 crc kubenswrapper[5003]: E0104 13:05:04.967079 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2653471d3f236d9f7756cea9904903c7608d0e833b5912434a521332255ad78\": container with ID starting with b2653471d3f236d9f7756cea9904903c7608d0e833b5912434a521332255ad78 not found: ID does not exist" containerID="b2653471d3f236d9f7756cea9904903c7608d0e833b5912434a521332255ad78"
Jan 04 13:05:04 crc kubenswrapper[5003]: I0104 13:05:04.967144 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2653471d3f236d9f7756cea9904903c7608d0e833b5912434a521332255ad78"} err="failed to get container status \"b2653471d3f236d9f7756cea9904903c7608d0e833b5912434a521332255ad78\": rpc error: code = NotFound desc = could not find container \"b2653471d3f236d9f7756cea9904903c7608d0e833b5912434a521332255ad78\": container with ID starting with b2653471d3f236d9f7756cea9904903c7608d0e833b5912434a521332255ad78 not found: ID does not exist"
Jan 04 13:05:05 crc kubenswrapper[5003]: I0104 13:05:05.016927 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 13:05:05 crc kubenswrapper[5003]: I0104 13:05:05.016955 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6642b732-1b9e-471f-9591-72c7f1d3e97c-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 13:05:05 crc kubenswrapper[5003]: I0104 13:05:05.016967 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfws4\" (UniqueName: \"kubernetes.io/projected/6642b732-1b9e-471f-9591-72c7f1d3e97c-kube-api-access-pfws4\") on node \"crc\" DevicePath \"\""
Jan 04 13:05:05 crc kubenswrapper[5003]: I0104 13:05:05.215527 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7ntx"]
Jan 04 13:05:05 crc kubenswrapper[5003]: I0104 13:05:05.220504 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k7ntx"]
Jan 04 13:05:06 crc kubenswrapper[5003]: I0104 13:05:06.826292 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6642b732-1b9e-471f-9591-72c7f1d3e97c" path="/var/lib/kubelet/pods/6642b732-1b9e-471f-9591-72c7f1d3e97c/volumes"
Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.026071 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vfq26"]
Jan 04 13:06:11 crc kubenswrapper[5003]: E0104 13:06:11.027003 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6642b732-1b9e-471f-9591-72c7f1d3e97c" containerName="extract-utilities"
Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.027046 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6642b732-1b9e-471f-9591-72c7f1d3e97c" containerName="extract-utilities"
Jan 04 13:06:11 crc kubenswrapper[5003]: E0104 13:06:11.027082 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6642b732-1b9e-471f-9591-72c7f1d3e97c" containerName="registry-server"
Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.027091 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6642b732-1b9e-471f-9591-72c7f1d3e97c" containerName="registry-server"
Jan 04 13:06:11 crc kubenswrapper[5003]: E0104 13:06:11.027109 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6642b732-1b9e-471f-9591-72c7f1d3e97c" containerName="extract-content"
Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.027117 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6642b732-1b9e-471f-9591-72c7f1d3e97c" containerName="extract-content"
Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.027325 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6642b732-1b9e-471f-9591-72c7f1d3e97c" containerName="registry-server"
Need to start a new one" pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.037446 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-catalog-content\") pod \"community-operators-vfq26\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.037532 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7qv9\" (UniqueName: \"kubernetes.io/projected/b73a6a90-8299-46e8-99d8-2898fd82cc10-kube-api-access-x7qv9\") pod \"community-operators-vfq26\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.037554 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-utilities\") pod \"community-operators-vfq26\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.047825 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vfq26"] Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.139426 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7qv9\" (UniqueName: \"kubernetes.io/projected/b73a6a90-8299-46e8-99d8-2898fd82cc10-kube-api-access-x7qv9\") pod \"community-operators-vfq26\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.139551 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-utilities\") pod \"community-operators-vfq26\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.139693 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-catalog-content\") pod \"community-operators-vfq26\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.140332 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-utilities\") pod \"community-operators-vfq26\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.140496 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-catalog-content\") pod \"community-operators-vfq26\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.173366 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x7qv9\" (UniqueName: \"kubernetes.io/projected/b73a6a90-8299-46e8-99d8-2898fd82cc10-kube-api-access-x7qv9\") pod \"community-operators-vfq26\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.369344 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:11 crc kubenswrapper[5003]: I0104 13:06:11.682361 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vfq26"] Jan 04 13:06:12 crc kubenswrapper[5003]: I0104 13:06:12.520592 5003 generic.go:334] "Generic (PLEG): container finished" podID="b73a6a90-8299-46e8-99d8-2898fd82cc10" containerID="55beb3e2987694178e8f69cd8ee5bb919981afede5b22c4b74cf5ef9aea71480" exitCode=0 Jan 04 13:06:12 crc kubenswrapper[5003]: I0104 13:06:12.520662 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfq26" event={"ID":"b73a6a90-8299-46e8-99d8-2898fd82cc10","Type":"ContainerDied","Data":"55beb3e2987694178e8f69cd8ee5bb919981afede5b22c4b74cf5ef9aea71480"} Jan 04 13:06:12 crc kubenswrapper[5003]: I0104 13:06:12.521765 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfq26" event={"ID":"b73a6a90-8299-46e8-99d8-2898fd82cc10","Type":"ContainerStarted","Data":"9b5a2cc3b72674aafdb0a33505b041a37127f58f61f0cc1abc22f011e5ec8124"} Jan 04 13:06:14 crc kubenswrapper[5003]: I0104 13:06:14.540590 5003 generic.go:334] "Generic (PLEG): container finished" podID="b73a6a90-8299-46e8-99d8-2898fd82cc10" containerID="14fcf63ba75a073347814e830f24f03bad62e26e653b58e5d122138aadc7a4d3" exitCode=0 Jan 04 13:06:14 crc kubenswrapper[5003]: I0104 13:06:14.540647 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfq26" event={"ID":"b73a6a90-8299-46e8-99d8-2898fd82cc10","Type":"ContainerDied","Data":"14fcf63ba75a073347814e830f24f03bad62e26e653b58e5d122138aadc7a4d3"} Jan 04 13:06:15 crc kubenswrapper[5003]: I0104 13:06:15.548375 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfq26" event={"ID":"b73a6a90-8299-46e8-99d8-2898fd82cc10","Type":"ContainerStarted","Data":"0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec"} Jan 04 13:06:15 crc kubenswrapper[5003]: I0104 13:06:15.565452 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vfq26" podStartSLOduration=2.963140694 podStartE2EDuration="5.565432392s" podCreationTimestamp="2026-01-04 13:06:10 +0000 UTC" firstStartedPulling="2026-01-04 13:06:12.5237044 +0000 UTC m=+4687.996734241" lastFinishedPulling="2026-01-04 13:06:15.125996078 +0000 UTC m=+4690.599025939" observedRunningTime="2026-01-04 13:06:15.563390508 +0000 UTC m=+4691.036420369" watchObservedRunningTime="2026-01-04 13:06:15.565432392 +0000 UTC m=+4691.038462243" Jan 04 13:06:21 crc kubenswrapper[5003]: I0104 13:06:21.369455 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:21 crc kubenswrapper[5003]: I0104 13:06:21.369946 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:21 crc kubenswrapper[5003]: I0104 13:06:21.438521 5003 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:21 crc kubenswrapper[5003]: I0104 13:06:21.669246 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:21 crc kubenswrapper[5003]: I0104 13:06:21.746039 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vfq26"] Jan 04 13:06:23 crc kubenswrapper[5003]: I0104 13:06:23.618457 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vfq26" podUID="b73a6a90-8299-46e8-99d8-2898fd82cc10" containerName="registry-server" containerID="cri-o://0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec" gracePeriod=2 Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.536267 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.561976 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-catalog-content\") pod \"b73a6a90-8299-46e8-99d8-2898fd82cc10\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.564705 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7qv9\" (UniqueName: \"kubernetes.io/projected/b73a6a90-8299-46e8-99d8-2898fd82cc10-kube-api-access-x7qv9\") pod \"b73a6a90-8299-46e8-99d8-2898fd82cc10\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.564971 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-utilities\") pod \"b73a6a90-8299-46e8-99d8-2898fd82cc10\" (UID: \"b73a6a90-8299-46e8-99d8-2898fd82cc10\") " Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.566736 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-utilities" (OuterVolumeSpecName: "utilities") pod "b73a6a90-8299-46e8-99d8-2898fd82cc10" (UID: "b73a6a90-8299-46e8-99d8-2898fd82cc10"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.584275 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b73a6a90-8299-46e8-99d8-2898fd82cc10-kube-api-access-x7qv9" (OuterVolumeSpecName: "kube-api-access-x7qv9") pod "b73a6a90-8299-46e8-99d8-2898fd82cc10" (UID: "b73a6a90-8299-46e8-99d8-2898fd82cc10"). InnerVolumeSpecName "kube-api-access-x7qv9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.630353 5003 generic.go:334] "Generic (PLEG): container finished" podID="b73a6a90-8299-46e8-99d8-2898fd82cc10" containerID="0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec" exitCode=0 Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.630428 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfq26" event={"ID":"b73a6a90-8299-46e8-99d8-2898fd82cc10","Type":"ContainerDied","Data":"0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec"} Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.630483 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vfq26" event={"ID":"b73a6a90-8299-46e8-99d8-2898fd82cc10","Type":"ContainerDied","Data":"9b5a2cc3b72674aafdb0a33505b041a37127f58f61f0cc1abc22f011e5ec8124"} Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.630516 5003 scope.go:117] "RemoveContainer" containerID="0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.630515 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vfq26" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.636701 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b73a6a90-8299-46e8-99d8-2898fd82cc10" (UID: "b73a6a90-8299-46e8-99d8-2898fd82cc10"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.658467 5003 scope.go:117] "RemoveContainer" containerID="14fcf63ba75a073347814e830f24f03bad62e26e653b58e5d122138aadc7a4d3" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.666852 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.666879 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7qv9\" (UniqueName: \"kubernetes.io/projected/b73a6a90-8299-46e8-99d8-2898fd82cc10-kube-api-access-x7qv9\") on node \"crc\" DevicePath \"\"" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.666894 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b73a6a90-8299-46e8-99d8-2898fd82cc10-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.682638 5003 scope.go:117] "RemoveContainer" containerID="55beb3e2987694178e8f69cd8ee5bb919981afede5b22c4b74cf5ef9aea71480" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.703337 5003 scope.go:117] "RemoveContainer" containerID="0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec" Jan 04 13:06:24 crc kubenswrapper[5003]: E0104 13:06:24.703830 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec\": container with ID starting with 0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec not found: ID does not exist" 
containerID="0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.703879 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec"} err="failed to get container status \"0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec\": rpc error: code = NotFound desc = could not find container \"0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec\": container with ID starting with 0873882a962a5f9c53ea5bfaa0333351af4fa8f190752c58671b6919d74b1bec not found: ID does not exist" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.703915 5003 scope.go:117] "RemoveContainer" containerID="14fcf63ba75a073347814e830f24f03bad62e26e653b58e5d122138aadc7a4d3" Jan 04 13:06:24 crc kubenswrapper[5003]: E0104 13:06:24.704331 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14fcf63ba75a073347814e830f24f03bad62e26e653b58e5d122138aadc7a4d3\": container with ID starting with 14fcf63ba75a073347814e830f24f03bad62e26e653b58e5d122138aadc7a4d3 not found: ID does not exist" containerID="14fcf63ba75a073347814e830f24f03bad62e26e653b58e5d122138aadc7a4d3" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.704354 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14fcf63ba75a073347814e830f24f03bad62e26e653b58e5d122138aadc7a4d3"} err="failed to get container status \"14fcf63ba75a073347814e830f24f03bad62e26e653b58e5d122138aadc7a4d3\": rpc error: code = NotFound desc = could not find container \"14fcf63ba75a073347814e830f24f03bad62e26e653b58e5d122138aadc7a4d3\": container with ID starting with 14fcf63ba75a073347814e830f24f03bad62e26e653b58e5d122138aadc7a4d3 not found: ID does not exist" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.704371 5003 scope.go:117] "RemoveContainer" containerID="55beb3e2987694178e8f69cd8ee5bb919981afede5b22c4b74cf5ef9aea71480" Jan 04 13:06:24 crc kubenswrapper[5003]: E0104 13:06:24.704748 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55beb3e2987694178e8f69cd8ee5bb919981afede5b22c4b74cf5ef9aea71480\": container with ID starting with 55beb3e2987694178e8f69cd8ee5bb919981afede5b22c4b74cf5ef9aea71480 not found: ID does not exist" containerID="55beb3e2987694178e8f69cd8ee5bb919981afede5b22c4b74cf5ef9aea71480" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.704772 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55beb3e2987694178e8f69cd8ee5bb919981afede5b22c4b74cf5ef9aea71480"} err="failed to get container status \"55beb3e2987694178e8f69cd8ee5bb919981afede5b22c4b74cf5ef9aea71480\": rpc error: code = NotFound desc = could not find container \"55beb3e2987694178e8f69cd8ee5bb919981afede5b22c4b74cf5ef9aea71480\": container with ID starting with 55beb3e2987694178e8f69cd8ee5bb919981afede5b22c4b74cf5ef9aea71480 not found: ID does not exist" Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.970081 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vfq26"] Jan 04 13:06:24 crc kubenswrapper[5003]: I0104 13:06:24.979199 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vfq26"] Jan 04 13:06:26 crc kubenswrapper[5003]: I0104 13:06:26.822596 
5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b73a6a90-8299-46e8-99d8-2898fd82cc10" path="/var/lib/kubelet/pods/b73a6a90-8299-46e8-99d8-2898fd82cc10/volumes" Jan 04 13:07:09 crc kubenswrapper[5003]: I0104 13:07:09.418815 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:07:09 crc kubenswrapper[5003]: I0104 13:07:09.420900 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:07:39 crc kubenswrapper[5003]: I0104 13:07:39.418837 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:07:39 crc kubenswrapper[5003]: I0104 13:07:39.420298 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:08:09 crc kubenswrapper[5003]: I0104 13:08:09.418370 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:08:09 crc kubenswrapper[5003]: I0104 13:08:09.418878 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:08:09 crc kubenswrapper[5003]: I0104 13:08:09.418928 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 13:08:09 crc kubenswrapper[5003]: I0104 13:08:09.419599 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d975fa17306542a8366b2b2ca8fac560784ac02e83ac35bd1f2e4557aff34a56"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 13:08:09 crc kubenswrapper[5003]: I0104 13:08:09.419655 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://d975fa17306542a8366b2b2ca8fac560784ac02e83ac35bd1f2e4557aff34a56" gracePeriod=600 Jan 04 13:08:10 crc kubenswrapper[5003]: I0104 13:08:10.518116 5003 generic.go:334] "Generic 
(PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="d975fa17306542a8366b2b2ca8fac560784ac02e83ac35bd1f2e4557aff34a56" exitCode=0 Jan 04 13:08:10 crc kubenswrapper[5003]: I0104 13:08:10.518218 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"d975fa17306542a8366b2b2ca8fac560784ac02e83ac35bd1f2e4557aff34a56"} Jan 04 13:08:10 crc kubenswrapper[5003]: I0104 13:08:10.519069 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516"} Jan 04 13:08:10 crc kubenswrapper[5003]: I0104 13:08:10.519098 5003 scope.go:117] "RemoveContainer" containerID="ef496dc7266a21a9af0a37eacc1fc9da09ed51231565d9c67fcd5791a1bf1c7a" Jan 04 13:08:14 crc kubenswrapper[5003]: I0104 13:08:14.901061 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2qwzm"] Jan 04 13:08:14 crc kubenswrapper[5003]: E0104 13:08:14.902513 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b73a6a90-8299-46e8-99d8-2898fd82cc10" containerName="extract-utilities" Jan 04 13:08:14 crc kubenswrapper[5003]: I0104 13:08:14.902553 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b73a6a90-8299-46e8-99d8-2898fd82cc10" containerName="extract-utilities" Jan 04 13:08:14 crc kubenswrapper[5003]: E0104 13:08:14.902595 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b73a6a90-8299-46e8-99d8-2898fd82cc10" containerName="registry-server" Jan 04 13:08:14 crc kubenswrapper[5003]: I0104 13:08:14.902616 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b73a6a90-8299-46e8-99d8-2898fd82cc10" containerName="registry-server" Jan 04 13:08:14 crc kubenswrapper[5003]: E0104 13:08:14.902653 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b73a6a90-8299-46e8-99d8-2898fd82cc10" containerName="extract-content" Jan 04 13:08:14 crc kubenswrapper[5003]: I0104 13:08:14.902672 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b73a6a90-8299-46e8-99d8-2898fd82cc10" containerName="extract-content" Jan 04 13:08:14 crc kubenswrapper[5003]: I0104 13:08:14.903676 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b73a6a90-8299-46e8-99d8-2898fd82cc10" containerName="registry-server" Jan 04 13:08:14 crc kubenswrapper[5003]: I0104 13:08:14.907069 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:14 crc kubenswrapper[5003]: I0104 13:08:14.909999 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2qwzm"] Jan 04 13:08:14 crc kubenswrapper[5003]: I0104 13:08:14.956763 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lrl5\" (UniqueName: \"kubernetes.io/projected/edfa601b-0f47-473b-a3d2-91f90064d9e5-kube-api-access-6lrl5\") pod \"redhat-operators-2qwzm\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:14 crc kubenswrapper[5003]: I0104 13:08:14.956825 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-utilities\") pod \"redhat-operators-2qwzm\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:14 crc kubenswrapper[5003]: I0104 13:08:14.956875 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-catalog-content\") pod \"redhat-operators-2qwzm\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:15 crc kubenswrapper[5003]: I0104 13:08:15.057830 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-catalog-content\") pod \"redhat-operators-2qwzm\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:15 crc kubenswrapper[5003]: I0104 13:08:15.057993 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lrl5\" (UniqueName: \"kubernetes.io/projected/edfa601b-0f47-473b-a3d2-91f90064d9e5-kube-api-access-6lrl5\") pod \"redhat-operators-2qwzm\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:15 crc kubenswrapper[5003]: I0104 13:08:15.058057 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-utilities\") pod \"redhat-operators-2qwzm\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:15 crc kubenswrapper[5003]: I0104 13:08:15.058390 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-catalog-content\") pod \"redhat-operators-2qwzm\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:15 crc kubenswrapper[5003]: I0104 13:08:15.058601 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-utilities\") pod \"redhat-operators-2qwzm\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:15 crc kubenswrapper[5003]: I0104 13:08:15.086247 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6lrl5\" (UniqueName: \"kubernetes.io/projected/edfa601b-0f47-473b-a3d2-91f90064d9e5-kube-api-access-6lrl5\") pod \"redhat-operators-2qwzm\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:15 crc kubenswrapper[5003]: I0104 13:08:15.240822 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:15 crc kubenswrapper[5003]: I0104 13:08:15.708913 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2qwzm"] Jan 04 13:08:16 crc kubenswrapper[5003]: I0104 13:08:16.581429 5003 generic.go:334] "Generic (PLEG): container finished" podID="edfa601b-0f47-473b-a3d2-91f90064d9e5" containerID="6f14e55c7df61907791433b038e7ba2cf5eb870b14c2dd53a8f670b31eada189" exitCode=0 Jan 04 13:08:16 crc kubenswrapper[5003]: I0104 13:08:16.581861 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwzm" event={"ID":"edfa601b-0f47-473b-a3d2-91f90064d9e5","Type":"ContainerDied","Data":"6f14e55c7df61907791433b038e7ba2cf5eb870b14c2dd53a8f670b31eada189"} Jan 04 13:08:16 crc kubenswrapper[5003]: I0104 13:08:16.581902 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwzm" event={"ID":"edfa601b-0f47-473b-a3d2-91f90064d9e5","Type":"ContainerStarted","Data":"78d6b04689f17f6ca1f48f111956e473980092851e365524f2c9d04024acaff6"} Jan 04 13:08:17 crc kubenswrapper[5003]: I0104 13:08:17.590911 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwzm" event={"ID":"edfa601b-0f47-473b-a3d2-91f90064d9e5","Type":"ContainerStarted","Data":"ca957310f499f9c8b141a5cc57b92bc051c62bda723edb14f8871e467ecdd792"} Jan 04 13:08:18 crc kubenswrapper[5003]: I0104 13:08:18.601326 5003 generic.go:334] "Generic (PLEG): container finished" podID="edfa601b-0f47-473b-a3d2-91f90064d9e5" containerID="ca957310f499f9c8b141a5cc57b92bc051c62bda723edb14f8871e467ecdd792" exitCode=0 Jan 04 13:08:18 crc kubenswrapper[5003]: I0104 13:08:18.601395 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwzm" event={"ID":"edfa601b-0f47-473b-a3d2-91f90064d9e5","Type":"ContainerDied","Data":"ca957310f499f9c8b141a5cc57b92bc051c62bda723edb14f8871e467ecdd792"} Jan 04 13:08:19 crc kubenswrapper[5003]: I0104 13:08:19.612661 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwzm" event={"ID":"edfa601b-0f47-473b-a3d2-91f90064d9e5","Type":"ContainerStarted","Data":"d808fcf2d74cc2cc697f661774928459e41536c2a9d2fb712565a3405ea8fc9a"} Jan 04 13:08:19 crc kubenswrapper[5003]: I0104 13:08:19.644850 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2qwzm" podStartSLOduration=3.181690399 podStartE2EDuration="5.6448122s" podCreationTimestamp="2026-01-04 13:08:14 +0000 UTC" firstStartedPulling="2026-01-04 13:08:16.584661352 +0000 UTC m=+4812.057691193" lastFinishedPulling="2026-01-04 13:08:19.047783143 +0000 UTC m=+4814.520812994" observedRunningTime="2026-01-04 13:08:19.63238043 +0000 UTC m=+4815.105410371" watchObservedRunningTime="2026-01-04 13:08:19.6448122 +0000 UTC m=+4815.117842081" Jan 04 13:08:25 crc kubenswrapper[5003]: I0104 13:08:25.241090 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 
13:08:25 crc kubenswrapper[5003]: I0104 13:08:25.241544 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:25 crc kubenswrapper[5003]: I0104 13:08:25.283376 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:25 crc kubenswrapper[5003]: I0104 13:08:25.712403 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:25 crc kubenswrapper[5003]: I0104 13:08:25.760728 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2qwzm"] Jan 04 13:08:27 crc kubenswrapper[5003]: I0104 13:08:27.688676 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2qwzm" podUID="edfa601b-0f47-473b-a3d2-91f90064d9e5" containerName="registry-server" containerID="cri-o://d808fcf2d74cc2cc697f661774928459e41536c2a9d2fb712565a3405ea8fc9a" gracePeriod=2 Jan 04 13:08:30 crc kubenswrapper[5003]: I0104 13:08:30.717556 5003 generic.go:334] "Generic (PLEG): container finished" podID="edfa601b-0f47-473b-a3d2-91f90064d9e5" containerID="d808fcf2d74cc2cc697f661774928459e41536c2a9d2fb712565a3405ea8fc9a" exitCode=0 Jan 04 13:08:30 crc kubenswrapper[5003]: I0104 13:08:30.717692 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwzm" event={"ID":"edfa601b-0f47-473b-a3d2-91f90064d9e5","Type":"ContainerDied","Data":"d808fcf2d74cc2cc697f661774928459e41536c2a9d2fb712565a3405ea8fc9a"} Jan 04 13:08:30 crc kubenswrapper[5003]: I0104 13:08:30.947827 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.105358 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-catalog-content\") pod \"edfa601b-0f47-473b-a3d2-91f90064d9e5\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.105564 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-utilities\") pod \"edfa601b-0f47-473b-a3d2-91f90064d9e5\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.105622 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lrl5\" (UniqueName: \"kubernetes.io/projected/edfa601b-0f47-473b-a3d2-91f90064d9e5-kube-api-access-6lrl5\") pod \"edfa601b-0f47-473b-a3d2-91f90064d9e5\" (UID: \"edfa601b-0f47-473b-a3d2-91f90064d9e5\") " Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.106805 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-utilities" (OuterVolumeSpecName: "utilities") pod "edfa601b-0f47-473b-a3d2-91f90064d9e5" (UID: "edfa601b-0f47-473b-a3d2-91f90064d9e5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.121748 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edfa601b-0f47-473b-a3d2-91f90064d9e5-kube-api-access-6lrl5" (OuterVolumeSpecName: "kube-api-access-6lrl5") pod "edfa601b-0f47-473b-a3d2-91f90064d9e5" (UID: "edfa601b-0f47-473b-a3d2-91f90064d9e5"). InnerVolumeSpecName "kube-api-access-6lrl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.207794 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.207834 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lrl5\" (UniqueName: \"kubernetes.io/projected/edfa601b-0f47-473b-a3d2-91f90064d9e5-kube-api-access-6lrl5\") on node \"crc\" DevicePath \"\"" Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.224915 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "edfa601b-0f47-473b-a3d2-91f90064d9e5" (UID: "edfa601b-0f47-473b-a3d2-91f90064d9e5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.310205 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edfa601b-0f47-473b-a3d2-91f90064d9e5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.730779 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2qwzm" event={"ID":"edfa601b-0f47-473b-a3d2-91f90064d9e5","Type":"ContainerDied","Data":"78d6b04689f17f6ca1f48f111956e473980092851e365524f2c9d04024acaff6"} Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.730852 5003 scope.go:117] "RemoveContainer" containerID="d808fcf2d74cc2cc697f661774928459e41536c2a9d2fb712565a3405ea8fc9a" Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.731062 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2qwzm" Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.773530 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2qwzm"] Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.774145 5003 scope.go:117] "RemoveContainer" containerID="ca957310f499f9c8b141a5cc57b92bc051c62bda723edb14f8871e467ecdd792" Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.780583 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2qwzm"] Jan 04 13:08:31 crc kubenswrapper[5003]: I0104 13:08:31.798188 5003 scope.go:117] "RemoveContainer" containerID="6f14e55c7df61907791433b038e7ba2cf5eb870b14c2dd53a8f670b31eada189" Jan 04 13:08:32 crc kubenswrapper[5003]: I0104 13:08:32.817673 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edfa601b-0f47-473b-a3d2-91f90064d9e5" path="/var/lib/kubelet/pods/edfa601b-0f47-473b-a3d2-91f90064d9e5/volumes" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.227277 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-sspn5"] Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.236752 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-sspn5"] Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.432352 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-fkrt4"] Jan 04 13:09:25 crc kubenswrapper[5003]: E0104 13:09:25.432828 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edfa601b-0f47-473b-a3d2-91f90064d9e5" containerName="extract-content" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.432861 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="edfa601b-0f47-473b-a3d2-91f90064d9e5" containerName="extract-content" Jan 04 13:09:25 crc kubenswrapper[5003]: E0104 13:09:25.432892 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edfa601b-0f47-473b-a3d2-91f90064d9e5" containerName="extract-utilities" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.432910 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="edfa601b-0f47-473b-a3d2-91f90064d9e5" containerName="extract-utilities" Jan 04 13:09:25 crc kubenswrapper[5003]: E0104 13:09:25.432941 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edfa601b-0f47-473b-a3d2-91f90064d9e5" containerName="registry-server" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.432958 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="edfa601b-0f47-473b-a3d2-91f90064d9e5" containerName="registry-server" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.433278 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="edfa601b-0f47-473b-a3d2-91f90064d9e5" containerName="registry-server" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.434141 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.442668 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-fkrt4"] Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.446456 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.446481 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.446654 5003 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-mxrrk" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.446744 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.517580 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-crc-storage\") pod \"crc-storage-crc-fkrt4\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.518080 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sjxn\" (UniqueName: \"kubernetes.io/projected/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-kube-api-access-5sjxn\") pod \"crc-storage-crc-fkrt4\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.518160 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-node-mnt\") pod \"crc-storage-crc-fkrt4\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.620058 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-crc-storage\") pod \"crc-storage-crc-fkrt4\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.620141 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sjxn\" (UniqueName: \"kubernetes.io/projected/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-kube-api-access-5sjxn\") pod \"crc-storage-crc-fkrt4\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.620220 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-node-mnt\") pod \"crc-storage-crc-fkrt4\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.620858 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-node-mnt\") pod \"crc-storage-crc-fkrt4\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " 
pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.621364 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-crc-storage\") pod \"crc-storage-crc-fkrt4\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.651687 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sjxn\" (UniqueName: \"kubernetes.io/projected/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-kube-api-access-5sjxn\") pod \"crc-storage-crc-fkrt4\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:25 crc kubenswrapper[5003]: I0104 13:09:25.770131 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:26 crc kubenswrapper[5003]: I0104 13:09:26.267463 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-fkrt4"] Jan 04 13:09:26 crc kubenswrapper[5003]: I0104 13:09:26.386339 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 13:09:26 crc kubenswrapper[5003]: I0104 13:09:26.823704 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c023dad7-7cb5-49b0-9405-bfeac57ff9ee" path="/var/lib/kubelet/pods/c023dad7-7cb5-49b0-9405-bfeac57ff9ee/volumes" Jan 04 13:09:27 crc kubenswrapper[5003]: I0104 13:09:27.203377 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fkrt4" event={"ID":"b6cdb51f-4a24-40a9-9473-1e061b2d4f50","Type":"ContainerStarted","Data":"40426b391ac5be036555cca8d9048604a2d05c6c3f5b947e18b6d660cb35f26c"} Jan 04 13:09:27 crc kubenswrapper[5003]: I0104 13:09:27.203434 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fkrt4" event={"ID":"b6cdb51f-4a24-40a9-9473-1e061b2d4f50","Type":"ContainerStarted","Data":"e04e0f702e56eca8820e039cde13e0dc40d823f2d840ac16a1de9b6433d13aa5"} Jan 04 13:09:27 crc kubenswrapper[5003]: I0104 13:09:27.230045 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="crc-storage/crc-storage-crc-fkrt4" podStartSLOduration=1.646536018 podStartE2EDuration="2.230006226s" podCreationTimestamp="2026-01-04 13:09:25 +0000 UTC" firstStartedPulling="2026-01-04 13:09:26.386134507 +0000 UTC m=+4881.859164348" lastFinishedPulling="2026-01-04 13:09:26.969604685 +0000 UTC m=+4882.442634556" observedRunningTime="2026-01-04 13:09:27.228756313 +0000 UTC m=+4882.701786154" watchObservedRunningTime="2026-01-04 13:09:27.230006226 +0000 UTC m=+4882.703036067" Jan 04 13:09:28 crc kubenswrapper[5003]: I0104 13:09:28.215111 5003 generic.go:334] "Generic (PLEG): container finished" podID="b6cdb51f-4a24-40a9-9473-1e061b2d4f50" containerID="40426b391ac5be036555cca8d9048604a2d05c6c3f5b947e18b6d660cb35f26c" exitCode=0 Jan 04 13:09:28 crc kubenswrapper[5003]: I0104 13:09:28.215185 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fkrt4" event={"ID":"b6cdb51f-4a24-40a9-9473-1e061b2d4f50","Type":"ContainerDied","Data":"40426b391ac5be036555cca8d9048604a2d05c6c3f5b947e18b6d660cb35f26c"} Jan 04 13:09:29 crc kubenswrapper[5003]: I0104 13:09:29.633393 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:29 crc kubenswrapper[5003]: I0104 13:09:29.794936 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-crc-storage\") pod \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " Jan 04 13:09:29 crc kubenswrapper[5003]: I0104 13:09:29.795119 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-node-mnt\") pod \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " Jan 04 13:09:29 crc kubenswrapper[5003]: I0104 13:09:29.795202 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5sjxn\" (UniqueName: \"kubernetes.io/projected/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-kube-api-access-5sjxn\") pod \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\" (UID: \"b6cdb51f-4a24-40a9-9473-1e061b2d4f50\") " Jan 04 13:09:29 crc kubenswrapper[5003]: I0104 13:09:29.795249 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "b6cdb51f-4a24-40a9-9473-1e061b2d4f50" (UID: "b6cdb51f-4a24-40a9-9473-1e061b2d4f50"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 13:09:29 crc kubenswrapper[5003]: I0104 13:09:29.795587 5003 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 04 13:09:29 crc kubenswrapper[5003]: I0104 13:09:29.805499 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-kube-api-access-5sjxn" (OuterVolumeSpecName: "kube-api-access-5sjxn") pod "b6cdb51f-4a24-40a9-9473-1e061b2d4f50" (UID: "b6cdb51f-4a24-40a9-9473-1e061b2d4f50"). InnerVolumeSpecName "kube-api-access-5sjxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:09:29 crc kubenswrapper[5003]: I0104 13:09:29.826505 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "b6cdb51f-4a24-40a9-9473-1e061b2d4f50" (UID: "b6cdb51f-4a24-40a9-9473-1e061b2d4f50"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:09:29 crc kubenswrapper[5003]: I0104 13:09:29.898265 5003 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 04 13:09:29 crc kubenswrapper[5003]: I0104 13:09:29.898384 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5sjxn\" (UniqueName: \"kubernetes.io/projected/b6cdb51f-4a24-40a9-9473-1e061b2d4f50-kube-api-access-5sjxn\") on node \"crc\" DevicePath \"\"" Jan 04 13:09:30 crc kubenswrapper[5003]: I0104 13:09:30.244720 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fkrt4" event={"ID":"b6cdb51f-4a24-40a9-9473-1e061b2d4f50","Type":"ContainerDied","Data":"e04e0f702e56eca8820e039cde13e0dc40d823f2d840ac16a1de9b6433d13aa5"} Jan 04 13:09:30 crc kubenswrapper[5003]: I0104 13:09:30.245302 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e04e0f702e56eca8820e039cde13e0dc40d823f2d840ac16a1de9b6433d13aa5" Jan 04 13:09:30 crc kubenswrapper[5003]: I0104 13:09:30.244806 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-fkrt4" Jan 04 13:09:31 crc kubenswrapper[5003]: I0104 13:09:31.858149 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-fkrt4"] Jan 04 13:09:31 crc kubenswrapper[5003]: I0104 13:09:31.863588 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-fkrt4"] Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.045282 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-fzq54"] Jan 04 13:09:32 crc kubenswrapper[5003]: E0104 13:09:32.045564 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6cdb51f-4a24-40a9-9473-1e061b2d4f50" containerName="storage" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.045576 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6cdb51f-4a24-40a9-9473-1e061b2d4f50" containerName="storage" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.045715 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6cdb51f-4a24-40a9-9473-1e061b2d4f50" containerName="storage" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.046267 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.048888 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.048911 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.049186 5003 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-mxrrk" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.053933 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.064081 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-fzq54"] Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.134325 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-node-mnt\") pod \"crc-storage-crc-fzq54\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.134447 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-std2d\" (UniqueName: \"kubernetes.io/projected/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-kube-api-access-std2d\") pod \"crc-storage-crc-fzq54\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.134537 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-crc-storage\") pod \"crc-storage-crc-fzq54\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.235903 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-crc-storage\") pod \"crc-storage-crc-fzq54\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.236055 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-node-mnt\") pod \"crc-storage-crc-fzq54\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.236099 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-std2d\" (UniqueName: \"kubernetes.io/projected/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-kube-api-access-std2d\") pod \"crc-storage-crc-fzq54\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.236503 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-node-mnt\") pod \"crc-storage-crc-fzq54\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " 
pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.237467 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-crc-storage\") pod \"crc-storage-crc-fzq54\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.275929 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-std2d\" (UniqueName: \"kubernetes.io/projected/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-kube-api-access-std2d\") pod \"crc-storage-crc-fzq54\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.368257 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.817989 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cdb51f-4a24-40a9-9473-1e061b2d4f50" path="/var/lib/kubelet/pods/b6cdb51f-4a24-40a9-9473-1e061b2d4f50/volumes" Jan 04 13:09:32 crc kubenswrapper[5003]: I0104 13:09:32.890857 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-fzq54"] Jan 04 13:09:33 crc kubenswrapper[5003]: I0104 13:09:33.273980 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fzq54" event={"ID":"fb73ca84-6ff7-4719-bea3-b5e9a2195d41","Type":"ContainerStarted","Data":"8b6510291c7dc7eccd1adb7e59edf68af815bf74af378c71e384a9a8f46b92e3"} Jan 04 13:09:34 crc kubenswrapper[5003]: I0104 13:09:34.282562 5003 generic.go:334] "Generic (PLEG): container finished" podID="fb73ca84-6ff7-4719-bea3-b5e9a2195d41" containerID="dbbae40e4dea0f27c76eeb652758eee490a10d764fd6a40988f8f5e389274081" exitCode=0 Jan 04 13:09:34 crc kubenswrapper[5003]: I0104 13:09:34.282676 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fzq54" event={"ID":"fb73ca84-6ff7-4719-bea3-b5e9a2195d41","Type":"ContainerDied","Data":"dbbae40e4dea0f27c76eeb652758eee490a10d764fd6a40988f8f5e389274081"} Jan 04 13:09:35 crc kubenswrapper[5003]: I0104 13:09:35.583347 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:09:35 crc kubenswrapper[5003]: I0104 13:09:35.636473 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-std2d\" (UniqueName: \"kubernetes.io/projected/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-kube-api-access-std2d\") pod \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " Jan 04 13:09:35 crc kubenswrapper[5003]: I0104 13:09:35.636524 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-node-mnt\") pod \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " Jan 04 13:09:35 crc kubenswrapper[5003]: I0104 13:09:35.636599 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-crc-storage\") pod \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\" (UID: \"fb73ca84-6ff7-4719-bea3-b5e9a2195d41\") " Jan 04 13:09:35 crc kubenswrapper[5003]: I0104 13:09:35.637266 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "fb73ca84-6ff7-4719-bea3-b5e9a2195d41" (UID: "fb73ca84-6ff7-4719-bea3-b5e9a2195d41"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 13:09:35 crc kubenswrapper[5003]: I0104 13:09:35.642350 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-kube-api-access-std2d" (OuterVolumeSpecName: "kube-api-access-std2d") pod "fb73ca84-6ff7-4719-bea3-b5e9a2195d41" (UID: "fb73ca84-6ff7-4719-bea3-b5e9a2195d41"). InnerVolumeSpecName "kube-api-access-std2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:09:35 crc kubenswrapper[5003]: I0104 13:09:35.656766 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "fb73ca84-6ff7-4719-bea3-b5e9a2195d41" (UID: "fb73ca84-6ff7-4719-bea3-b5e9a2195d41"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:09:35 crc kubenswrapper[5003]: I0104 13:09:35.737572 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-std2d\" (UniqueName: \"kubernetes.io/projected/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-kube-api-access-std2d\") on node \"crc\" DevicePath \"\"" Jan 04 13:09:35 crc kubenswrapper[5003]: I0104 13:09:35.737629 5003 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 04 13:09:35 crc kubenswrapper[5003]: I0104 13:09:35.737652 5003 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/fb73ca84-6ff7-4719-bea3-b5e9a2195d41-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 04 13:09:36 crc kubenswrapper[5003]: I0104 13:09:36.301217 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fzq54" event={"ID":"fb73ca84-6ff7-4719-bea3-b5e9a2195d41","Type":"ContainerDied","Data":"8b6510291c7dc7eccd1adb7e59edf68af815bf74af378c71e384a9a8f46b92e3"} Jan 04 13:09:36 crc kubenswrapper[5003]: I0104 13:09:36.301625 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b6510291c7dc7eccd1adb7e59edf68af815bf74af378c71e384a9a8f46b92e3" Jan 04 13:09:36 crc kubenswrapper[5003]: I0104 13:09:36.301290 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-fzq54" Jan 04 13:10:09 crc kubenswrapper[5003]: I0104 13:10:09.418663 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:10:09 crc kubenswrapper[5003]: I0104 13:10:09.419470 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:10:14 crc kubenswrapper[5003]: I0104 13:10:14.228561 5003 scope.go:117] "RemoveContainer" containerID="cd654310bb0361ddff25b2085b912c3b7be2e424ecec5784f666dfb25191e4c1" Jan 04 13:10:39 crc kubenswrapper[5003]: I0104 13:10:39.418391 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:10:39 crc kubenswrapper[5003]: I0104 13:10:39.419684 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:11:09 crc kubenswrapper[5003]: I0104 13:11:09.419442 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" start-of-body= Jan 04 13:11:09 crc kubenswrapper[5003]: I0104 13:11:09.420553 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:11:09 crc kubenswrapper[5003]: I0104 13:11:09.420639 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 13:11:09 crc kubenswrapper[5003]: I0104 13:11:09.423084 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 13:11:09 crc kubenswrapper[5003]: I0104 13:11:09.423245 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" gracePeriod=600 Jan 04 13:11:09 crc kubenswrapper[5003]: E0104 13:11:09.553884 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:11:10 crc kubenswrapper[5003]: I0104 13:11:10.170941 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" exitCode=0 Jan 04 13:11:10 crc kubenswrapper[5003]: I0104 13:11:10.171038 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516"} Jan 04 13:11:10 crc kubenswrapper[5003]: I0104 13:11:10.171108 5003 scope.go:117] "RemoveContainer" containerID="d975fa17306542a8366b2b2ca8fac560784ac02e83ac35bd1f2e4557aff34a56" Jan 04 13:11:10 crc kubenswrapper[5003]: I0104 13:11:10.172370 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:11:10 crc kubenswrapper[5003]: E0104 13:11:10.173089 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:11:21 crc kubenswrapper[5003]: I0104 13:11:21.807557 5003 scope.go:117] "RemoveContainer" 
containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:11:21 crc kubenswrapper[5003]: E0104 13:11:21.808832 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:11:33 crc kubenswrapper[5003]: I0104 13:11:33.807255 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:11:33 crc kubenswrapper[5003]: E0104 13:11:33.808579 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:11:46 crc kubenswrapper[5003]: I0104 13:11:46.807480 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:11:46 crc kubenswrapper[5003]: E0104 13:11:46.808493 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.430433 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-mbwmz"] Jan 04 13:11:54 crc kubenswrapper[5003]: E0104 13:11:54.431092 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb73ca84-6ff7-4719-bea3-b5e9a2195d41" containerName="storage" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.431107 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb73ca84-6ff7-4719-bea3-b5e9a2195d41" containerName="storage" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.431243 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb73ca84-6ff7-4719-bea3-b5e9a2195d41" containerName="storage" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.431983 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.438581 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-jvphn" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.438782 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.438841 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.438929 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.447712 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56bbd59dc5-5j9d5"] Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.448996 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.451980 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.452485 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-mbwmz"] Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.462505 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rf6b\" (UniqueName: \"kubernetes.io/projected/6885c707-693a-4af7-a082-46b2e070d0ed-kube-api-access-5rf6b\") pod \"dnsmasq-dns-5986db9b4f-mbwmz\" (UID: \"6885c707-693a-4af7-a082-46b2e070d0ed\") " pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.462565 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-dns-svc\") pod \"dnsmasq-dns-56bbd59dc5-5j9d5\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") " pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.462596 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6885c707-693a-4af7-a082-46b2e070d0ed-config\") pod \"dnsmasq-dns-5986db9b4f-mbwmz\" (UID: \"6885c707-693a-4af7-a082-46b2e070d0ed\") " pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.462660 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-config\") pod \"dnsmasq-dns-56bbd59dc5-5j9d5\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") " pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.462717 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l4g2\" (UniqueName: \"kubernetes.io/projected/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-kube-api-access-7l4g2\") pod \"dnsmasq-dns-56bbd59dc5-5j9d5\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") " pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.474398 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/dnsmasq-dns-56bbd59dc5-5j9d5"] Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.564233 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rf6b\" (UniqueName: \"kubernetes.io/projected/6885c707-693a-4af7-a082-46b2e070d0ed-kube-api-access-5rf6b\") pod \"dnsmasq-dns-5986db9b4f-mbwmz\" (UID: \"6885c707-693a-4af7-a082-46b2e070d0ed\") " pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.564281 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-dns-svc\") pod \"dnsmasq-dns-56bbd59dc5-5j9d5\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") " pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.564307 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6885c707-693a-4af7-a082-46b2e070d0ed-config\") pod \"dnsmasq-dns-5986db9b4f-mbwmz\" (UID: \"6885c707-693a-4af7-a082-46b2e070d0ed\") " pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.564366 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-config\") pod \"dnsmasq-dns-56bbd59dc5-5j9d5\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") " pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.564391 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l4g2\" (UniqueName: \"kubernetes.io/projected/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-kube-api-access-7l4g2\") pod \"dnsmasq-dns-56bbd59dc5-5j9d5\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") " pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.565682 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-dns-svc\") pod \"dnsmasq-dns-56bbd59dc5-5j9d5\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") " pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.565683 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-config\") pod \"dnsmasq-dns-56bbd59dc5-5j9d5\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") " pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.567163 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6885c707-693a-4af7-a082-46b2e070d0ed-config\") pod \"dnsmasq-dns-5986db9b4f-mbwmz\" (UID: \"6885c707-693a-4af7-a082-46b2e070d0ed\") " pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.589920 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l4g2\" (UniqueName: \"kubernetes.io/projected/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-kube-api-access-7l4g2\") pod \"dnsmasq-dns-56bbd59dc5-5j9d5\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") " pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.591993 5003 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rf6b\" (UniqueName: \"kubernetes.io/projected/6885c707-693a-4af7-a082-46b2e070d0ed-kube-api-access-5rf6b\") pod \"dnsmasq-dns-5986db9b4f-mbwmz\" (UID: \"6885c707-693a-4af7-a082-46b2e070d0ed\") " pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.719093 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bbd59dc5-5j9d5"] Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.719948 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.752403 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.771285 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-k8bmn"] Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.772590 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.787455 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-k8bmn"] Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.873556 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl4n9\" (UniqueName: \"kubernetes.io/projected/7e71e91a-dc90-44c0-a951-3b307204ecd0-kube-api-access-bl4n9\") pod \"dnsmasq-dns-865d9b578f-k8bmn\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.873690 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-config\") pod \"dnsmasq-dns-865d9b578f-k8bmn\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.873720 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-dns-svc\") pod \"dnsmasq-dns-865d9b578f-k8bmn\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.975012 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl4n9\" (UniqueName: \"kubernetes.io/projected/7e71e91a-dc90-44c0-a951-3b307204ecd0-kube-api-access-bl4n9\") pod \"dnsmasq-dns-865d9b578f-k8bmn\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.976426 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-config\") pod \"dnsmasq-dns-865d9b578f-k8bmn\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.976458 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-dns-svc\") pod \"dnsmasq-dns-865d9b578f-k8bmn\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.978119 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-dns-svc\") pod \"dnsmasq-dns-865d9b578f-k8bmn\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.978278 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-config\") pod \"dnsmasq-dns-865d9b578f-k8bmn\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:54 crc kubenswrapper[5003]: I0104 13:11:54.996250 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl4n9\" (UniqueName: \"kubernetes.io/projected/7e71e91a-dc90-44c0-a951-3b307204ecd0-kube-api-access-bl4n9\") pod \"dnsmasq-dns-865d9b578f-k8bmn\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.205584 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.304710 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bbd59dc5-5j9d5"] Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.368527 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-mbwmz"] Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.470511 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-k8bmn"] Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.565943 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-mbwmz"] Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.619706 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" event={"ID":"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32","Type":"ContainerStarted","Data":"315075f0311060c3b7e9abe977300a52fa32d41229143aced09501fb336f3a94"} Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.621023 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-72qsj"] Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.622538 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.634285 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" event={"ID":"7e71e91a-dc90-44c0-a951-3b307204ecd0","Type":"ContainerStarted","Data":"801f6e3eeddfb78489d85fd20e676b63411783ecc85ede707d0d6af80e550f29"} Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.636197 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" event={"ID":"6885c707-693a-4af7-a082-46b2e070d0ed","Type":"ContainerStarted","Data":"590fad7e00b5ec0118d124a1c8845a4bc1b06973b3c7a82ad6d30593fbf28a7b"} Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.671908 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-72qsj"] Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.801310 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-72qsj\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") " pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.801366 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-config\") pod \"dnsmasq-dns-5d79f765b5-72qsj\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") " pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.801410 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzdtv\" (UniqueName: \"kubernetes.io/projected/330d5594-4379-4cb8-80af-0af1c0ff29e8-kube-api-access-jzdtv\") pod \"dnsmasq-dns-5d79f765b5-72qsj\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") " pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.897838 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.898948 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.902669 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-config\") pod \"dnsmasq-dns-5d79f765b5-72qsj\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") " pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.902998 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzdtv\" (UniqueName: \"kubernetes.io/projected/330d5594-4379-4cb8-80af-0af1c0ff29e8-kube-api-access-jzdtv\") pod \"dnsmasq-dns-5d79f765b5-72qsj\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") " pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.903258 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-config\") pod \"dnsmasq-dns-5d79f765b5-72qsj\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") " pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.903826 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-72qsj\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") " pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.904143 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.904157 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.904231 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.904278 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-dns-svc\") pod \"dnsmasq-dns-5d79f765b5-72qsj\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") " pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.904494 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.904560 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.904729 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.911002 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.914167 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-wllz4" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.927669 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzdtv\" (UniqueName: 
\"kubernetes.io/projected/330d5594-4379-4cb8-80af-0af1c0ff29e8-kube-api-access-jzdtv\") pod \"dnsmasq-dns-5d79f765b5-72qsj\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") " pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:55 crc kubenswrapper[5003]: I0104 13:11:55.966885 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.005743 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.006160 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdxfl\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-kube-api-access-cdxfl\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.006196 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.006217 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.006234 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/891e210d-2fe4-4439-905f-e36b8e427eb8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.006250 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/891e210d-2fe4-4439-905f-e36b8e427eb8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.006282 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.006320 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.006346 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.006369 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.006404 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.108161 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.108236 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.108265 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.108291 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.108340 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.108396 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.108431 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdxfl\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-kube-api-access-cdxfl\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.108465 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.108487 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.108506 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/891e210d-2fe4-4439-905f-e36b8e427eb8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.108525 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/891e210d-2fe4-4439-905f-e36b8e427eb8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.112409 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.112789 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.113441 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.113637 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.114621 5003 
csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.114661 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6c04eb50fedee20aea88220143cc7b3f6050cc4ad553ccffe048ae9026fa180c/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.118122 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.118236 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/891e210d-2fe4-4439-905f-e36b8e427eb8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.119646 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.119683 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.127763 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/891e210d-2fe4-4439-905f-e36b8e427eb8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.138166 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdxfl\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-kube-api-access-cdxfl\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.159388 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\") pod \"rabbitmq-cell1-server-0\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.214603 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-72qsj"] Jan 04 13:11:56 crc 
Jan 04 13:11:56 crc kubenswrapper[5003]: W0104 13:11:56.215088 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod330d5594_4379_4cb8_80af_0af1c0ff29e8.slice/crio-df8abc7270dbb53778dc819b87ca09da67b55049b76b24f0ea3fa28f21a2ba3b WatchSource:0}: Error finding container df8abc7270dbb53778dc819b87ca09da67b55049b76b24f0ea3fa28f21a2ba3b: Status 404 returned error can't find the container with id df8abc7270dbb53778dc819b87ca09da67b55049b76b24f0ea3fa28f21a2ba3b
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.368233 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.646359 5003 generic.go:334] "Generic (PLEG): container finished" podID="6885c707-693a-4af7-a082-46b2e070d0ed" containerID="def33478b0e874016532e007bbfe89a4426c101779df26ff48e0cb498a7ed51c" exitCode=0
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.646471 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" event={"ID":"6885c707-693a-4af7-a082-46b2e070d0ed","Type":"ContainerDied","Data":"def33478b0e874016532e007bbfe89a4426c101779df26ff48e0cb498a7ed51c"}
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.649958 5003 generic.go:334] "Generic (PLEG): container finished" podID="3efa2d4a-3172-4b0e-8b22-a5d33aec6e32" containerID="9fe1f7d1e9770792ffe0dc4fbea5baf27ad95802d2d625759118eebfcd6ca328" exitCode=0
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.650049 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" event={"ID":"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32","Type":"ContainerDied","Data":"9fe1f7d1e9770792ffe0dc4fbea5baf27ad95802d2d625759118eebfcd6ca328"}
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.651307 5003 generic.go:334] "Generic (PLEG): container finished" podID="7e71e91a-dc90-44c0-a951-3b307204ecd0" containerID="d87d80e72f64c823940c458b76e7cf8d37b4ebca986013368dbd2a20686d93dd" exitCode=0
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.651384 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" event={"ID":"7e71e91a-dc90-44c0-a951-3b307204ecd0","Type":"ContainerDied","Data":"d87d80e72f64c823940c458b76e7cf8d37b4ebca986013368dbd2a20686d93dd"}
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.657340 5003 generic.go:334] "Generic (PLEG): container finished" podID="330d5594-4379-4cb8-80af-0af1c0ff29e8" containerID="94f0fe0081cc4f496813295eeea943deebc36c72838bfe88ad923b7793573448" exitCode=0
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.657801 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" event={"ID":"330d5594-4379-4cb8-80af-0af1c0ff29e8","Type":"ContainerDied","Data":"94f0fe0081cc4f496813295eeea943deebc36c72838bfe88ad923b7793573448"}
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.657832 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" event={"ID":"330d5594-4379-4cb8-80af-0af1c0ff29e8","Type":"ContainerStarted","Data":"df8abc7270dbb53778dc819b87ca09da67b55049b76b24f0ea3fa28f21a2ba3b"}
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.753438 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.754745 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.757868 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.758405 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.767800 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.768227 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.768653 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.768939 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.769343 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-h58fc"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.769525 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.842924 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.944979 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.945043 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.945077 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.945123 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.945155 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.945208 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.945294 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.945327 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9frxp\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-kube-api-access-9frxp\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.945624 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3a5ca80d-195f-4290-8db3-79ebbe58387c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.945652 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-config-data\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: I0104 13:11:56.945681 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3a5ca80d-195f-4290-8db3-79ebbe58387c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:56 crc kubenswrapper[5003]: E0104 13:11:56.969642 5003 log.go:32] "CreateContainer in sandbox from runtime service failed" err=<
Jan 04 13:11:56 crc kubenswrapper[5003]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/7e71e91a-dc90-44c0-a951-3b307204ecd0/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Jan 04 13:11:56 crc kubenswrapper[5003]: > podSandboxID="801f6e3eeddfb78489d85fd20e676b63411783ecc85ede707d0d6af80e550f29"
Jan 04 13:11:56 crc kubenswrapper[5003]: E0104 13:11:56.969986 5003 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Jan 04 13:11:56 crc kubenswrapper[5003]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:ea0bf67f1aa5d95a9a07b9c8692c293470f1311792c55d3d57f1f92e56689c33,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nb6hc5h68h68h594h659hdbh679h65ch5f6hdch6h5b9h8fh55hfhf8h57fhc7h56ch687h669h559h678h5dhc7hf7h697h5d6h9ch669h54fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bl4n9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-865d9b578f-k8bmn_openstack(7e71e91a-dc90-44c0-a951-3b307204ecd0): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/7e71e91a-dc90-44c0-a951-3b307204ecd0/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Jan 04 13:11:56 crc kubenswrapper[5003]: > logger="UnhandledError"
Jan 04 13:11:56 crc kubenswrapper[5003]: E0104 13:11:56.972223 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/7e71e91a-dc90-44c0-a951-3b307204ecd0/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" podUID="7e71e91a-dc90-44c0-a951-3b307204ecd0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.038518 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.046796 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.046849 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.046877 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.046916 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.046947 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.046968 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9frxp\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-kube-api-access-9frxp\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.046999 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3a5ca80d-195f-4290-8db3-79ebbe58387c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.047036 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-config-data\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.047058 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3a5ca80d-195f-4290-8db3-79ebbe58387c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.047083 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.047104 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.048411 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-config-data\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.048576 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.048838 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.049386 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.050169 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.051104 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.051138 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d6efa95f8d59fc20d38a71435bb191e9af886aae1d88deb62d6e46d114b2a718/globalmount\"" pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.055655 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.055704 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.058883 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3a5ca80d-195f-4290-8db3-79ebbe58387c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.063629 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3a5ca80d-195f-4290-8db3-79ebbe58387c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.067802 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9frxp\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-kube-api-access-9frxp\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.088115 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\") pod \"rabbitmq-server-0\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.092263 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.148096 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-config\") pod \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") "
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.148225 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-dns-svc\") pod \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") "
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.148463 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7l4g2\" (UniqueName: \"kubernetes.io/projected/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-kube-api-access-7l4g2\") pod \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\" (UID: \"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32\") "
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.153544 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-kube-api-access-7l4g2" (OuterVolumeSpecName: "kube-api-access-7l4g2") pod "3efa2d4a-3172-4b0e-8b22-a5d33aec6e32" (UID: "3efa2d4a-3172-4b0e-8b22-a5d33aec6e32"). InnerVolumeSpecName "kube-api-access-7l4g2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.167663 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-config" (OuterVolumeSpecName: "config") pod "3efa2d4a-3172-4b0e-8b22-a5d33aec6e32" (UID: "3efa2d4a-3172-4b0e-8b22-a5d33aec6e32"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.170883 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3efa2d4a-3172-4b0e-8b22-a5d33aec6e32" (UID: "3efa2d4a-3172-4b0e-8b22-a5d33aec6e32"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.189403 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.249896 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rf6b\" (UniqueName: \"kubernetes.io/projected/6885c707-693a-4af7-a082-46b2e070d0ed-kube-api-access-5rf6b\") pod \"6885c707-693a-4af7-a082-46b2e070d0ed\" (UID: \"6885c707-693a-4af7-a082-46b2e070d0ed\") "
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.250063 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6885c707-693a-4af7-a082-46b2e070d0ed-config\") pod \"6885c707-693a-4af7-a082-46b2e070d0ed\" (UID: \"6885c707-693a-4af7-a082-46b2e070d0ed\") "
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.250424 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7l4g2\" (UniqueName: \"kubernetes.io/projected/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-kube-api-access-7l4g2\") on node \"crc\" DevicePath \"\""
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.250446 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-config\") on node \"crc\" DevicePath \"\""
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.250460 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.253479 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6885c707-693a-4af7-a082-46b2e070d0ed-kube-api-access-5rf6b" (OuterVolumeSpecName: "kube-api-access-5rf6b") pod "6885c707-693a-4af7-a082-46b2e070d0ed" (UID: "6885c707-693a-4af7-a082-46b2e070d0ed"). InnerVolumeSpecName "kube-api-access-5rf6b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.279803 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6885c707-693a-4af7-a082-46b2e070d0ed-config" (OuterVolumeSpecName: "config") pod "6885c707-693a-4af7-a082-46b2e070d0ed" (UID: "6885c707-693a-4af7-a082-46b2e070d0ed"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.352485 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rf6b\" (UniqueName: \"kubernetes.io/projected/6885c707-693a-4af7-a082-46b2e070d0ed-kube-api-access-5rf6b\") on node \"crc\" DevicePath \"\""
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.352524 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6885c707-693a-4af7-a082-46b2e070d0ed-config\") on node \"crc\" DevicePath \"\""
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.388886 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Jan 04 13:11:57 crc kubenswrapper[5003]: E0104 13:11:57.389266 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3efa2d4a-3172-4b0e-8b22-a5d33aec6e32" containerName="init"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.389285 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3efa2d4a-3172-4b0e-8b22-a5d33aec6e32" containerName="init"
Jan 04 13:11:57 crc kubenswrapper[5003]: E0104 13:11:57.389303 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6885c707-693a-4af7-a082-46b2e070d0ed" containerName="init"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.389310 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="6885c707-693a-4af7-a082-46b2e070d0ed" containerName="init"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.389455 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="6885c707-693a-4af7-a082-46b2e070d0ed" containerName="init"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.389473 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3efa2d4a-3172-4b0e-8b22-a5d33aec6e32" containerName="init"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.390222 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.393350 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-67886"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.400543 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.401551 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.401673 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.408452 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.415905 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.554682 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c48613d9-a038-4293-855b-6d05642cc386-kolla-config\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.554764 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7adac5a9-4bee-452a-8dd4-2fbc8f2afee3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7adac5a9-4bee-452a-8dd4-2fbc8f2afee3\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.555132 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c48613d9-a038-4293-855b-6d05642cc386-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.555209 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzjjf\" (UniqueName: \"kubernetes.io/projected/c48613d9-a038-4293-855b-6d05642cc386-kube-api-access-fzjjf\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.555353 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c48613d9-a038-4293-855b-6d05642cc386-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.555384 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c48613d9-a038-4293-855b-6d05642cc386-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.555420 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c48613d9-a038-4293-855b-6d05642cc386-config-data-default\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.555443 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48613d9-a038-4293-855b-6d05642cc386-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.645063 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 04 13:11:57 crc kubenswrapper[5003]: W0104 13:11:57.651244 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a5ca80d_195f_4290_8db3_79ebbe58387c.slice/crio-29edd86384a1c0b5e7a569210efa9efc107b4461d83045399ae946d5f9d88848 WatchSource:0}: Error finding container 29edd86384a1c0b5e7a569210efa9efc107b4461d83045399ae946d5f9d88848: Status 404 returned error can't find the container with id 29edd86384a1c0b5e7a569210efa9efc107b4461d83045399ae946d5f9d88848
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.658526 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c48613d9-a038-4293-855b-6d05642cc386-config-data-default\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.662902 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c48613d9-a038-4293-855b-6d05642cc386-config-data-default\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.663129 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48613d9-a038-4293-855b-6d05642cc386-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.663186 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c48613d9-a038-4293-855b-6d05642cc386-kolla-config\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.663250 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7adac5a9-4bee-452a-8dd4-2fbc8f2afee3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7adac5a9-4bee-452a-8dd4-2fbc8f2afee3\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0"
Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.663500 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c48613d9-a038-4293-855b-6d05642cc386-operator-scripts\") pod
\"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.663547 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzjjf\" (UniqueName: \"kubernetes.io/projected/c48613d9-a038-4293-855b-6d05642cc386-kube-api-access-fzjjf\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.663680 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c48613d9-a038-4293-855b-6d05642cc386-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.663706 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c48613d9-a038-4293-855b-6d05642cc386-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.667575 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c48613d9-a038-4293-855b-6d05642cc386-kolla-config\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.669880 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c48613d9-a038-4293-855b-6d05642cc386-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.668915 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c48613d9-a038-4293-855b-6d05642cc386-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.672452 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c48613d9-a038-4293-855b-6d05642cc386-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.675223 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48613d9-a038-4293-855b-6d05642cc386-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.682679 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"891e210d-2fe4-4439-905f-e36b8e427eb8","Type":"ContainerStarted","Data":"2886769705e62f3525a4e0cfe377fe2fafe154d166a25621c8f0d6efa512de25"} Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.685737 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.685884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5986db9b4f-mbwmz" event={"ID":"6885c707-693a-4af7-a082-46b2e070d0ed","Type":"ContainerDied","Data":"590fad7e00b5ec0118d124a1c8845a4bc1b06973b3c7a82ad6d30593fbf28a7b"} Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.685929 5003 scope.go:117] "RemoveContainer" containerID="def33478b0e874016532e007bbfe89a4426c101779df26ff48e0cb498a7ed51c" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.691471 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.691646 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7adac5a9-4bee-452a-8dd4-2fbc8f2afee3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7adac5a9-4bee-452a-8dd4-2fbc8f2afee3\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f182230dd49e1567c2effae0bb717a76e9507f69e472a9138e0c06a92c05942c/globalmount\"" pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.694980 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3a5ca80d-195f-4290-8db3-79ebbe58387c","Type":"ContainerStarted","Data":"29edd86384a1c0b5e7a569210efa9efc107b4461d83045399ae946d5f9d88848"} Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.697722 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzjjf\" (UniqueName: \"kubernetes.io/projected/c48613d9-a038-4293-855b-6d05642cc386-kube-api-access-fzjjf\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.700682 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" event={"ID":"3efa2d4a-3172-4b0e-8b22-a5d33aec6e32","Type":"ContainerDied","Data":"315075f0311060c3b7e9abe977300a52fa32d41229143aced09501fb336f3a94"} Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.700740 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56bbd59dc5-5j9d5" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.737237 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" event={"ID":"330d5594-4379-4cb8-80af-0af1c0ff29e8","Type":"ContainerStarted","Data":"2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf"} Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.737895 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.765316 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" podStartSLOduration=2.7652891349999997 podStartE2EDuration="2.765289135s" podCreationTimestamp="2026-01-04 13:11:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:11:57.751599432 +0000 UTC m=+5033.224629273" watchObservedRunningTime="2026-01-04 13:11:57.765289135 +0000 UTC m=+5033.238318976" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.796557 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7adac5a9-4bee-452a-8dd4-2fbc8f2afee3\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7adac5a9-4bee-452a-8dd4-2fbc8f2afee3\") pod \"openstack-galera-0\" (UID: \"c48613d9-a038-4293-855b-6d05642cc386\") " pod="openstack/openstack-galera-0" Jan 04 13:11:57 crc kubenswrapper[5003]: I0104 13:11:57.956169 5003 scope.go:117] "RemoveContainer" containerID="9fe1f7d1e9770792ffe0dc4fbea5baf27ad95802d2d625759118eebfcd6ca328" Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.012875 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.213935 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56bbd59dc5-5j9d5"] Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.222731 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56bbd59dc5-5j9d5"] Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.248273 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-mbwmz"] Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.260768 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5986db9b4f-mbwmz"] Jan 04 13:11:58 crc kubenswrapper[5003]: W0104 13:11:58.478058 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc48613d9_a038_4293_855b_6d05642cc386.slice/crio-1134fab11f722159fda39782b8dc2ad6324611df85e03d0981cdf8d90021567e WatchSource:0}: Error finding container 1134fab11f722159fda39782b8dc2ad6324611df85e03d0981cdf8d90021567e: Status 404 returned error can't find the container with id 1134fab11f722159fda39782b8dc2ad6324611df85e03d0981cdf8d90021567e Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.482398 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.741711 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c48613d9-a038-4293-855b-6d05642cc386","Type":"ContainerStarted","Data":"4234b6d96d64d1ea6a325cdff5f35089449103855335c48cffc0103412b564ea"} Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.741752 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c48613d9-a038-4293-855b-6d05642cc386","Type":"ContainerStarted","Data":"1134fab11f722159fda39782b8dc2ad6324611df85e03d0981cdf8d90021567e"} Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.744782 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" event={"ID":"7e71e91a-dc90-44c0-a951-3b307204ecd0","Type":"ContainerStarted","Data":"f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6"} Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.745053 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.746627 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"891e210d-2fe4-4439-905f-e36b8e427eb8","Type":"ContainerStarted","Data":"786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574"} Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.827692 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" podStartSLOduration=4.827671005 podStartE2EDuration="4.827671005s" podCreationTimestamp="2026-01-04 13:11:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:11:58.824733997 +0000 UTC m=+5034.297763868" watchObservedRunningTime="2026-01-04 13:11:58.827671005 +0000 UTC m=+5034.300700866" Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.828931 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="3efa2d4a-3172-4b0e-8b22-a5d33aec6e32" path="/var/lib/kubelet/pods/3efa2d4a-3172-4b0e-8b22-a5d33aec6e32/volumes" Jan 04 13:11:58 crc kubenswrapper[5003]: I0104 13:11:58.831933 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6885c707-693a-4af7-a082-46b2e070d0ed" path="/var/lib/kubelet/pods/6885c707-693a-4af7-a082-46b2e070d0ed/volumes" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.014574 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.015824 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.018117 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.018244 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-rg9w9" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.018313 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.028033 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.041878 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.085168 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/00679dc8-7acc-4aaf-afe9-105458b3fd33-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.085220 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/00679dc8-7acc-4aaf-afe9-105458b3fd33-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.085244 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/00679dc8-7acc-4aaf-afe9-105458b3fd33-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.085393 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/00679dc8-7acc-4aaf-afe9-105458b3fd33-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.085429 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00679dc8-7acc-4aaf-afe9-105458b3fd33-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") 
" pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.085450 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00679dc8-7acc-4aaf-afe9-105458b3fd33-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.085471 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-838015f0-c755-42f7-b798-02b30362cac1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-838015f0-c755-42f7-b798-02b30362cac1\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.085510 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2wn4\" (UniqueName: \"kubernetes.io/projected/00679dc8-7acc-4aaf-afe9-105458b3fd33-kube-api-access-t2wn4\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.186499 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/00679dc8-7acc-4aaf-afe9-105458b3fd33-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.186575 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00679dc8-7acc-4aaf-afe9-105458b3fd33-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.186608 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00679dc8-7acc-4aaf-afe9-105458b3fd33-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.186636 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-838015f0-c755-42f7-b798-02b30362cac1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-838015f0-c755-42f7-b798-02b30362cac1\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.186683 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2wn4\" (UniqueName: \"kubernetes.io/projected/00679dc8-7acc-4aaf-afe9-105458b3fd33-kube-api-access-t2wn4\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.186725 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/00679dc8-7acc-4aaf-afe9-105458b3fd33-config-data-default\") pod 
\"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.186762 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/00679dc8-7acc-4aaf-afe9-105458b3fd33-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.186787 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/00679dc8-7acc-4aaf-afe9-105458b3fd33-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.187994 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/00679dc8-7acc-4aaf-afe9-105458b3fd33-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.188284 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/00679dc8-7acc-4aaf-afe9-105458b3fd33-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.188335 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/00679dc8-7acc-4aaf-afe9-105458b3fd33-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.188626 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00679dc8-7acc-4aaf-afe9-105458b3fd33-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.191776 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00679dc8-7acc-4aaf-afe9-105458b3fd33-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.191838 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/00679dc8-7acc-4aaf-afe9-105458b3fd33-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.199632 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.199685 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-838015f0-c755-42f7-b798-02b30362cac1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-838015f0-c755-42f7-b798-02b30362cac1\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/caa1057e13f7bcde4e20a1af564273430bad66c853d70234aeae2ec0060060e0/globalmount\"" pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.206944 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2wn4\" (UniqueName: \"kubernetes.io/projected/00679dc8-7acc-4aaf-afe9-105458b3fd33-kube-api-access-t2wn4\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.236222 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-838015f0-c755-42f7-b798-02b30362cac1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-838015f0-c755-42f7-b798-02b30362cac1\") pod \"openstack-cell1-galera-0\" (UID: \"00679dc8-7acc-4aaf-afe9-105458b3fd33\") " pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.323662 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.324628 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.326366 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.326575 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-k4wzj" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.328180 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.333831 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.337196 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.390710 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729222f1-c416-4981-9312-7c642a0fe9f1-combined-ca-bundle\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.391057 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nqrr\" (UniqueName: \"kubernetes.io/projected/729222f1-c416-4981-9312-7c642a0fe9f1-kube-api-access-8nqrr\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.391101 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/729222f1-c416-4981-9312-7c642a0fe9f1-kolla-config\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.391120 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/729222f1-c416-4981-9312-7c642a0fe9f1-memcached-tls-certs\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.391173 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/729222f1-c416-4981-9312-7c642a0fe9f1-config-data\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.492633 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/729222f1-c416-4981-9312-7c642a0fe9f1-config-data\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.492726 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729222f1-c416-4981-9312-7c642a0fe9f1-combined-ca-bundle\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.492770 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nqrr\" (UniqueName: \"kubernetes.io/projected/729222f1-c416-4981-9312-7c642a0fe9f1-kube-api-access-8nqrr\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.492823 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/729222f1-c416-4981-9312-7c642a0fe9f1-kolla-config\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc 
kubenswrapper[5003]: I0104 13:11:59.492853 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/729222f1-c416-4981-9312-7c642a0fe9f1-memcached-tls-certs\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.494800 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/729222f1-c416-4981-9312-7c642a0fe9f1-kolla-config\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.494897 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/729222f1-c416-4981-9312-7c642a0fe9f1-config-data\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.497370 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/729222f1-c416-4981-9312-7c642a0fe9f1-memcached-tls-certs\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.500791 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729222f1-c416-4981-9312-7c642a0fe9f1-combined-ca-bundle\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.512991 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nqrr\" (UniqueName: \"kubernetes.io/projected/729222f1-c416-4981-9312-7c642a0fe9f1-kube-api-access-8nqrr\") pod \"memcached-0\" (UID: \"729222f1-c416-4981-9312-7c642a0fe9f1\") " pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.687286 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.760266 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3a5ca80d-195f-4290-8db3-79ebbe58387c","Type":"ContainerStarted","Data":"3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937"} Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.785220 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Jan 04 13:11:59 crc kubenswrapper[5003]: I0104 13:11:59.807179 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:11:59 crc kubenswrapper[5003]: E0104 13:11:59.807393 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:12:00 crc kubenswrapper[5003]: I0104 13:12:00.114746 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Jan 04 13:12:00 crc kubenswrapper[5003]: I0104 13:12:00.769839 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00679dc8-7acc-4aaf-afe9-105458b3fd33","Type":"ContainerStarted","Data":"552a400e3ccc57cfee600e9f0ec87f2bf38b24c0600d39c435908ad842e16fe0"} Jan 04 13:12:00 crc kubenswrapper[5003]: I0104 13:12:00.769919 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00679dc8-7acc-4aaf-afe9-105458b3fd33","Type":"ContainerStarted","Data":"4c82abf53ab66057e4efd15a2158e97b13ea072519eaf7a63e2b4a4ab7f36088"} Jan 04 13:12:00 crc kubenswrapper[5003]: I0104 13:12:00.772387 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"729222f1-c416-4981-9312-7c642a0fe9f1","Type":"ContainerStarted","Data":"15fe4852d301826ea26bacdf007a64b3ec1922a80604733218c816928cbcfd83"} Jan 04 13:12:00 crc kubenswrapper[5003]: I0104 13:12:00.772498 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"729222f1-c416-4981-9312-7c642a0fe9f1","Type":"ContainerStarted","Data":"5b5b199a5cb7d176274ede863db5bb477092cc7100b302b9530e6ff75de424f5"} Jan 04 13:12:00 crc kubenswrapper[5003]: I0104 13:12:00.822909 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=1.8228763890000002 podStartE2EDuration="1.822876389s" podCreationTimestamp="2026-01-04 13:11:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:12:00.821562524 +0000 UTC m=+5036.294592405" watchObservedRunningTime="2026-01-04 13:12:00.822876389 +0000 UTC m=+5036.295906270" Jan 04 13:12:01 crc kubenswrapper[5003]: I0104 13:12:01.783810 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Jan 04 13:12:02 crc kubenswrapper[5003]: I0104 13:12:02.795358 5003 generic.go:334] "Generic (PLEG): container finished" podID="c48613d9-a038-4293-855b-6d05642cc386" containerID="4234b6d96d64d1ea6a325cdff5f35089449103855335c48cffc0103412b564ea" exitCode=0 Jan 04 13:12:02 crc 
kubenswrapper[5003]: I0104 13:12:02.795841 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c48613d9-a038-4293-855b-6d05642cc386","Type":"ContainerDied","Data":"4234b6d96d64d1ea6a325cdff5f35089449103855335c48cffc0103412b564ea"} Jan 04 13:12:03 crc kubenswrapper[5003]: I0104 13:12:03.844356 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c48613d9-a038-4293-855b-6d05642cc386","Type":"ContainerStarted","Data":"cf0226a6796220b72852c9c3e34b39ef79005c04c418e97e0706eef52889d709"} Jan 04 13:12:03 crc kubenswrapper[5003]: I0104 13:12:03.846648 5003 generic.go:334] "Generic (PLEG): container finished" podID="00679dc8-7acc-4aaf-afe9-105458b3fd33" containerID="552a400e3ccc57cfee600e9f0ec87f2bf38b24c0600d39c435908ad842e16fe0" exitCode=0 Jan 04 13:12:03 crc kubenswrapper[5003]: I0104 13:12:03.846701 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00679dc8-7acc-4aaf-afe9-105458b3fd33","Type":"ContainerDied","Data":"552a400e3ccc57cfee600e9f0ec87f2bf38b24c0600d39c435908ad842e16fe0"} Jan 04 13:12:03 crc kubenswrapper[5003]: I0104 13:12:03.909166 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=7.909131962 podStartE2EDuration="7.909131962s" podCreationTimestamp="2026-01-04 13:11:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:12:03.869226093 +0000 UTC m=+5039.342255944" watchObservedRunningTime="2026-01-04 13:12:03.909131962 +0000 UTC m=+5039.382161823" Jan 04 13:12:04 crc kubenswrapper[5003]: I0104 13:12:04.858333 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"00679dc8-7acc-4aaf-afe9-105458b3fd33","Type":"ContainerStarted","Data":"0a58dac99c451ea45fea0c4d7dfe5a5a7d7d01fd8681478bb0b1bfeb320c6972"} Jan 04 13:12:04 crc kubenswrapper[5003]: I0104 13:12:04.887440 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.88741521 podStartE2EDuration="7.88741521s" podCreationTimestamp="2026-01-04 13:11:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:12:04.885404167 +0000 UTC m=+5040.358434038" watchObservedRunningTime="2026-01-04 13:12:04.88741521 +0000 UTC m=+5040.360445051" Jan 04 13:12:05 crc kubenswrapper[5003]: I0104 13:12:05.207716 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:12:05 crc kubenswrapper[5003]: I0104 13:12:05.968200 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.023307 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-k8bmn"] Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.023591 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" podUID="7e71e91a-dc90-44c0-a951-3b307204ecd0" containerName="dnsmasq-dns" containerID="cri-o://f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6" gracePeriod=10 Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.494435 5003 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.665611 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-dns-svc\") pod \"7e71e91a-dc90-44c0-a951-3b307204ecd0\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.665803 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bl4n9\" (UniqueName: \"kubernetes.io/projected/7e71e91a-dc90-44c0-a951-3b307204ecd0-kube-api-access-bl4n9\") pod \"7e71e91a-dc90-44c0-a951-3b307204ecd0\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.665931 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-config\") pod \"7e71e91a-dc90-44c0-a951-3b307204ecd0\" (UID: \"7e71e91a-dc90-44c0-a951-3b307204ecd0\") " Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.672423 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e71e91a-dc90-44c0-a951-3b307204ecd0-kube-api-access-bl4n9" (OuterVolumeSpecName: "kube-api-access-bl4n9") pod "7e71e91a-dc90-44c0-a951-3b307204ecd0" (UID: "7e71e91a-dc90-44c0-a951-3b307204ecd0"). InnerVolumeSpecName "kube-api-access-bl4n9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.711613 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7e71e91a-dc90-44c0-a951-3b307204ecd0" (UID: "7e71e91a-dc90-44c0-a951-3b307204ecd0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.712084 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-config" (OuterVolumeSpecName: "config") pod "7e71e91a-dc90-44c0-a951-3b307204ecd0" (UID: "7e71e91a-dc90-44c0-a951-3b307204ecd0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.768156 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-config\") on node \"crc\" DevicePath \"\"" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.768208 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7e71e91a-dc90-44c0-a951-3b307204ecd0-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.768226 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bl4n9\" (UniqueName: \"kubernetes.io/projected/7e71e91a-dc90-44c0-a951-3b307204ecd0-kube-api-access-bl4n9\") on node \"crc\" DevicePath \"\"" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.881466 5003 generic.go:334] "Generic (PLEG): container finished" podID="7e71e91a-dc90-44c0-a951-3b307204ecd0" containerID="f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6" exitCode=0 Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.881587 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" event={"ID":"7e71e91a-dc90-44c0-a951-3b307204ecd0","Type":"ContainerDied","Data":"f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6"} Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.881605 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.881999 5003 scope.go:117] "RemoveContainer" containerID="f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.881909 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865d9b578f-k8bmn" event={"ID":"7e71e91a-dc90-44c0-a951-3b307204ecd0","Type":"ContainerDied","Data":"801f6e3eeddfb78489d85fd20e676b63411783ecc85ede707d0d6af80e550f29"} Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.916180 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-k8bmn"] Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.917617 5003 scope.go:117] "RemoveContainer" containerID="d87d80e72f64c823940c458b76e7cf8d37b4ebca986013368dbd2a20686d93dd" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.923293 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865d9b578f-k8bmn"] Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.944133 5003 scope.go:117] "RemoveContainer" containerID="f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6" Jan 04 13:12:06 crc kubenswrapper[5003]: E0104 13:12:06.949201 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6\": container with ID starting with f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6 not found: ID does not exist" containerID="f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.949382 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6"} err="failed to get container status 
\"f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6\": rpc error: code = NotFound desc = could not find container \"f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6\": container with ID starting with f565a1c91f5b271f26214c5e16761636bd310c6b9b5dda50bd17b067ac6c0db6 not found: ID does not exist" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.949497 5003 scope.go:117] "RemoveContainer" containerID="d87d80e72f64c823940c458b76e7cf8d37b4ebca986013368dbd2a20686d93dd" Jan 04 13:12:06 crc kubenswrapper[5003]: E0104 13:12:06.950360 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d87d80e72f64c823940c458b76e7cf8d37b4ebca986013368dbd2a20686d93dd\": container with ID starting with d87d80e72f64c823940c458b76e7cf8d37b4ebca986013368dbd2a20686d93dd not found: ID does not exist" containerID="d87d80e72f64c823940c458b76e7cf8d37b4ebca986013368dbd2a20686d93dd" Jan 04 13:12:06 crc kubenswrapper[5003]: I0104 13:12:06.950406 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d87d80e72f64c823940c458b76e7cf8d37b4ebca986013368dbd2a20686d93dd"} err="failed to get container status \"d87d80e72f64c823940c458b76e7cf8d37b4ebca986013368dbd2a20686d93dd\": rpc error: code = NotFound desc = could not find container \"d87d80e72f64c823940c458b76e7cf8d37b4ebca986013368dbd2a20686d93dd\": container with ID starting with d87d80e72f64c823940c458b76e7cf8d37b4ebca986013368dbd2a20686d93dd not found: ID does not exist" Jan 04 13:12:08 crc kubenswrapper[5003]: I0104 13:12:08.014153 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Jan 04 13:12:08 crc kubenswrapper[5003]: I0104 13:12:08.014516 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Jan 04 13:12:08 crc kubenswrapper[5003]: I0104 13:12:08.820694 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e71e91a-dc90-44c0-a951-3b307204ecd0" path="/var/lib/kubelet/pods/7e71e91a-dc90-44c0-a951-3b307204ecd0/volumes" Jan 04 13:12:09 crc kubenswrapper[5003]: I0104 13:12:09.338394 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Jan 04 13:12:09 crc kubenswrapper[5003]: I0104 13:12:09.338442 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Jan 04 13:12:09 crc kubenswrapper[5003]: I0104 13:12:09.689387 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Jan 04 13:12:10 crc kubenswrapper[5003]: I0104 13:12:10.357463 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Jan 04 13:12:10 crc kubenswrapper[5003]: I0104 13:12:10.487506 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Jan 04 13:12:10 crc kubenswrapper[5003]: I0104 13:12:10.806788 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:12:10 crc kubenswrapper[5003]: E0104 13:12:10.807164 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:12:11 crc kubenswrapper[5003]: I0104 13:12:11.624294 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Jan 04 13:12:11 crc kubenswrapper[5003]: I0104 13:12:11.730923 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.368752 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-9cm4h"] Jan 04 13:12:16 crc kubenswrapper[5003]: E0104 13:12:16.369667 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e71e91a-dc90-44c0-a951-3b307204ecd0" containerName="dnsmasq-dns" Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.369683 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e71e91a-dc90-44c0-a951-3b307204ecd0" containerName="dnsmasq-dns" Jan 04 13:12:16 crc kubenswrapper[5003]: E0104 13:12:16.369700 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e71e91a-dc90-44c0-a951-3b307204ecd0" containerName="init" Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.369712 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e71e91a-dc90-44c0-a951-3b307204ecd0" containerName="init" Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.369897 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e71e91a-dc90-44c0-a951-3b307204ecd0" containerName="dnsmasq-dns" Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.370543 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-9cm4h"
Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.373256 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret"
Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.386522 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-9cm4h"]
Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.573224 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/573b862b-bae7-4181-a08e-cc67471bb486-operator-scripts\") pod \"root-account-create-update-9cm4h\" (UID: \"573b862b-bae7-4181-a08e-cc67471bb486\") " pod="openstack/root-account-create-update-9cm4h"
Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.573294 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhlj4\" (UniqueName: \"kubernetes.io/projected/573b862b-bae7-4181-a08e-cc67471bb486-kube-api-access-jhlj4\") pod \"root-account-create-update-9cm4h\" (UID: \"573b862b-bae7-4181-a08e-cc67471bb486\") " pod="openstack/root-account-create-update-9cm4h"
Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.675005 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhlj4\" (UniqueName: \"kubernetes.io/projected/573b862b-bae7-4181-a08e-cc67471bb486-kube-api-access-jhlj4\") pod \"root-account-create-update-9cm4h\" (UID: \"573b862b-bae7-4181-a08e-cc67471bb486\") " pod="openstack/root-account-create-update-9cm4h"
Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.675197 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/573b862b-bae7-4181-a08e-cc67471bb486-operator-scripts\") pod \"root-account-create-update-9cm4h\" (UID: \"573b862b-bae7-4181-a08e-cc67471bb486\") " pod="openstack/root-account-create-update-9cm4h"
Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.676337 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/573b862b-bae7-4181-a08e-cc67471bb486-operator-scripts\") pod \"root-account-create-update-9cm4h\" (UID: \"573b862b-bae7-4181-a08e-cc67471bb486\") " pod="openstack/root-account-create-update-9cm4h"
Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.694154 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhlj4\" (UniqueName: \"kubernetes.io/projected/573b862b-bae7-4181-a08e-cc67471bb486-kube-api-access-jhlj4\") pod \"root-account-create-update-9cm4h\" (UID: \"573b862b-bae7-4181-a08e-cc67471bb486\") " pod="openstack/root-account-create-update-9cm4h"
Jan 04 13:12:16 crc kubenswrapper[5003]: I0104 13:12:16.701891 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-9cm4h"
Jan 04 13:12:17 crc kubenswrapper[5003]: I0104 13:12:17.114049 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-9cm4h"]
Jan 04 13:12:17 crc kubenswrapper[5003]: I0104 13:12:17.972465 5003 generic.go:334] "Generic (PLEG): container finished" podID="573b862b-bae7-4181-a08e-cc67471bb486" containerID="9dc63ccc6b38483c46725558dec45fa17a62b8b20968bbd43dd9242a22a34c33" exitCode=0
Jan 04 13:12:17 crc kubenswrapper[5003]: I0104 13:12:17.972507 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9cm4h" event={"ID":"573b862b-bae7-4181-a08e-cc67471bb486","Type":"ContainerDied","Data":"9dc63ccc6b38483c46725558dec45fa17a62b8b20968bbd43dd9242a22a34c33"}
Jan 04 13:12:17 crc kubenswrapper[5003]: I0104 13:12:17.972532 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9cm4h" event={"ID":"573b862b-bae7-4181-a08e-cc67471bb486","Type":"ContainerStarted","Data":"9001b6b12c9baddba44e4d37b771a529a247ee0d36800b44f2edfd5157f76198"}
Jan 04 13:12:19 crc kubenswrapper[5003]: I0104 13:12:19.400210 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-9cm4h"
Jan 04 13:12:19 crc kubenswrapper[5003]: I0104 13:12:19.519062 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/573b862b-bae7-4181-a08e-cc67471bb486-operator-scripts\") pod \"573b862b-bae7-4181-a08e-cc67471bb486\" (UID: \"573b862b-bae7-4181-a08e-cc67471bb486\") "
Jan 04 13:12:19 crc kubenswrapper[5003]: I0104 13:12:19.519607 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhlj4\" (UniqueName: \"kubernetes.io/projected/573b862b-bae7-4181-a08e-cc67471bb486-kube-api-access-jhlj4\") pod \"573b862b-bae7-4181-a08e-cc67471bb486\" (UID: \"573b862b-bae7-4181-a08e-cc67471bb486\") "
Jan 04 13:12:19 crc kubenswrapper[5003]: I0104 13:12:19.520340 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/573b862b-bae7-4181-a08e-cc67471bb486-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "573b862b-bae7-4181-a08e-cc67471bb486" (UID: "573b862b-bae7-4181-a08e-cc67471bb486"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:12:19 crc kubenswrapper[5003]: I0104 13:12:19.527328 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/573b862b-bae7-4181-a08e-cc67471bb486-kube-api-access-jhlj4" (OuterVolumeSpecName: "kube-api-access-jhlj4") pod "573b862b-bae7-4181-a08e-cc67471bb486" (UID: "573b862b-bae7-4181-a08e-cc67471bb486"). InnerVolumeSpecName "kube-api-access-jhlj4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:12:19 crc kubenswrapper[5003]: I0104 13:12:19.621196 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhlj4\" (UniqueName: \"kubernetes.io/projected/573b862b-bae7-4181-a08e-cc67471bb486-kube-api-access-jhlj4\") on node \"crc\" DevicePath \"\""
Jan 04 13:12:19 crc kubenswrapper[5003]: I0104 13:12:19.621232 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/573b862b-bae7-4181-a08e-cc67471bb486-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 13:12:19 crc kubenswrapper[5003]: I0104 13:12:19.990739 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-9cm4h" event={"ID":"573b862b-bae7-4181-a08e-cc67471bb486","Type":"ContainerDied","Data":"9001b6b12c9baddba44e4d37b771a529a247ee0d36800b44f2edfd5157f76198"}
Jan 04 13:12:19 crc kubenswrapper[5003]: I0104 13:12:19.991214 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9001b6b12c9baddba44e4d37b771a529a247ee0d36800b44f2edfd5157f76198"
Jan 04 13:12:19 crc kubenswrapper[5003]: I0104 13:12:19.990871 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-9cm4h"
Jan 04 13:12:22 crc kubenswrapper[5003]: I0104 13:12:22.806961 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516"
Jan 04 13:12:22 crc kubenswrapper[5003]: E0104 13:12:22.807620 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 13:12:23 crc kubenswrapper[5003]: I0104 13:12:23.002432 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-9cm4h"]
Jan 04 13:12:23 crc kubenswrapper[5003]: I0104 13:12:23.013060 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-9cm4h"]
Jan 04 13:12:24 crc kubenswrapper[5003]: I0104 13:12:24.828056 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="573b862b-bae7-4181-a08e-cc67471bb486" path="/var/lib/kubelet/pods/573b862b-bae7-4181-a08e-cc67471bb486/volumes"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.001593 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-rh47m"]
Jan 04 13:12:28 crc kubenswrapper[5003]: E0104 13:12:28.002401 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="573b862b-bae7-4181-a08e-cc67471bb486" containerName="mariadb-account-create-update"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.002424 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="573b862b-bae7-4181-a08e-cc67471bb486" containerName="mariadb-account-create-update"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.002611 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="573b862b-bae7-4181-a08e-cc67471bb486" containerName="mariadb-account-create-update"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.003316 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rh47m"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.006384 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.010998 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rh47m"]
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.069307 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a464e30-2e05-4592-940e-5335bb92b086-operator-scripts\") pod \"root-account-create-update-rh47m\" (UID: \"9a464e30-2e05-4592-940e-5335bb92b086\") " pod="openstack/root-account-create-update-rh47m"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.069686 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwwvm\" (UniqueName: \"kubernetes.io/projected/9a464e30-2e05-4592-940e-5335bb92b086-kube-api-access-gwwvm\") pod \"root-account-create-update-rh47m\" (UID: \"9a464e30-2e05-4592-940e-5335bb92b086\") " pod="openstack/root-account-create-update-rh47m"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.170786 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwwvm\" (UniqueName: \"kubernetes.io/projected/9a464e30-2e05-4592-940e-5335bb92b086-kube-api-access-gwwvm\") pod \"root-account-create-update-rh47m\" (UID: \"9a464e30-2e05-4592-940e-5335bb92b086\") " pod="openstack/root-account-create-update-rh47m"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.170872 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a464e30-2e05-4592-940e-5335bb92b086-operator-scripts\") pod \"root-account-create-update-rh47m\" (UID: \"9a464e30-2e05-4592-940e-5335bb92b086\") " pod="openstack/root-account-create-update-rh47m"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.171591 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a464e30-2e05-4592-940e-5335bb92b086-operator-scripts\") pod \"root-account-create-update-rh47m\" (UID: \"9a464e30-2e05-4592-940e-5335bb92b086\") " pod="openstack/root-account-create-update-rh47m"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.210623 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwwvm\" (UniqueName: \"kubernetes.io/projected/9a464e30-2e05-4592-940e-5335bb92b086-kube-api-access-gwwvm\") pod \"root-account-create-update-rh47m\" (UID: \"9a464e30-2e05-4592-940e-5335bb92b086\") " pod="openstack/root-account-create-update-rh47m"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.335123 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rh47m"
Jan 04 13:12:28 crc kubenswrapper[5003]: I0104 13:12:28.839872 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rh47m"]
Jan 04 13:12:29 crc kubenswrapper[5003]: I0104 13:12:29.080100 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rh47m" event={"ID":"9a464e30-2e05-4592-940e-5335bb92b086","Type":"ContainerStarted","Data":"c62b35f730a09d55865823176053e707ffcd8151dd4cebfcd3266745583c8b10"}
Jan 04 13:12:30 crc kubenswrapper[5003]: I0104 13:12:30.091529 5003 generic.go:334] "Generic (PLEG): container finished" podID="9a464e30-2e05-4592-940e-5335bb92b086" containerID="37479772f97e43e4755e6d9eb0ebf34db29c771973543d3c5c34a49e93643e56" exitCode=0
Jan 04 13:12:30 crc kubenswrapper[5003]: I0104 13:12:30.091589 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rh47m" event={"ID":"9a464e30-2e05-4592-940e-5335bb92b086","Type":"ContainerDied","Data":"37479772f97e43e4755e6d9eb0ebf34db29c771973543d3c5c34a49e93643e56"}
Jan 04 13:12:31 crc kubenswrapper[5003]: I0104 13:12:31.105027 5003 generic.go:334] "Generic (PLEG): container finished" podID="891e210d-2fe4-4439-905f-e36b8e427eb8" containerID="786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574" exitCode=0
Jan 04 13:12:31 crc kubenswrapper[5003]: I0104 13:12:31.105096 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"891e210d-2fe4-4439-905f-e36b8e427eb8","Type":"ContainerDied","Data":"786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574"}
Jan 04 13:12:31 crc kubenswrapper[5003]: I0104 13:12:31.474844 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rh47m"
Jan 04 13:12:31 crc kubenswrapper[5003]: I0104 13:12:31.637276 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwwvm\" (UniqueName: \"kubernetes.io/projected/9a464e30-2e05-4592-940e-5335bb92b086-kube-api-access-gwwvm\") pod \"9a464e30-2e05-4592-940e-5335bb92b086\" (UID: \"9a464e30-2e05-4592-940e-5335bb92b086\") "
Jan 04 13:12:31 crc kubenswrapper[5003]: I0104 13:12:31.637678 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a464e30-2e05-4592-940e-5335bb92b086-operator-scripts\") pod \"9a464e30-2e05-4592-940e-5335bb92b086\" (UID: \"9a464e30-2e05-4592-940e-5335bb92b086\") "
Jan 04 13:12:31 crc kubenswrapper[5003]: I0104 13:12:31.638177 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a464e30-2e05-4592-940e-5335bb92b086-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9a464e30-2e05-4592-940e-5335bb92b086" (UID: "9a464e30-2e05-4592-940e-5335bb92b086"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:12:31 crc kubenswrapper[5003]: I0104 13:12:31.638442 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a464e30-2e05-4592-940e-5335bb92b086-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 13:12:31 crc kubenswrapper[5003]: I0104 13:12:31.640849 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a464e30-2e05-4592-940e-5335bb92b086-kube-api-access-gwwvm" (OuterVolumeSpecName: "kube-api-access-gwwvm") pod "9a464e30-2e05-4592-940e-5335bb92b086" (UID: "9a464e30-2e05-4592-940e-5335bb92b086"). InnerVolumeSpecName "kube-api-access-gwwvm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:12:31 crc kubenswrapper[5003]: I0104 13:12:31.740735 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwwvm\" (UniqueName: \"kubernetes.io/projected/9a464e30-2e05-4592-940e-5335bb92b086-kube-api-access-gwwvm\") on node \"crc\" DevicePath \"\""
Jan 04 13:12:32 crc kubenswrapper[5003]: I0104 13:12:32.115337 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"891e210d-2fe4-4439-905f-e36b8e427eb8","Type":"ContainerStarted","Data":"29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7"}
Jan 04 13:12:32 crc kubenswrapper[5003]: I0104 13:12:32.116141 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Jan 04 13:12:32 crc kubenswrapper[5003]: I0104 13:12:32.117950 5003 generic.go:334] "Generic (PLEG): container finished" podID="3a5ca80d-195f-4290-8db3-79ebbe58387c" containerID="3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937" exitCode=0
Jan 04 13:12:32 crc kubenswrapper[5003]: I0104 13:12:32.118028 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3a5ca80d-195f-4290-8db3-79ebbe58387c","Type":"ContainerDied","Data":"3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937"}
Jan 04 13:12:32 crc kubenswrapper[5003]: I0104 13:12:32.120249 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rh47m" event={"ID":"9a464e30-2e05-4592-940e-5335bb92b086","Type":"ContainerDied","Data":"c62b35f730a09d55865823176053e707ffcd8151dd4cebfcd3266745583c8b10"}
Jan 04 13:12:32 crc kubenswrapper[5003]: I0104 13:12:32.120297 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c62b35f730a09d55865823176053e707ffcd8151dd4cebfcd3266745583c8b10"
Jan 04 13:12:32 crc kubenswrapper[5003]: I0104 13:12:32.120402 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rh47m"
Jan 04 13:12:32 crc kubenswrapper[5003]: I0104 13:12:32.143284 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=38.143265296 podStartE2EDuration="38.143265296s" podCreationTimestamp="2026-01-04 13:11:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:12:32.137307008 +0000 UTC m=+5067.610336879" watchObservedRunningTime="2026-01-04 13:12:32.143265296 +0000 UTC m=+5067.616295137"
Jan 04 13:12:33 crc kubenswrapper[5003]: I0104 13:12:33.130150 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3a5ca80d-195f-4290-8db3-79ebbe58387c","Type":"ContainerStarted","Data":"dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6"}
Jan 04 13:12:33 crc kubenswrapper[5003]: I0104 13:12:33.130546 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Jan 04 13:12:35 crc kubenswrapper[5003]: I0104 13:12:35.922198 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516"
Jan 04 13:12:35 crc kubenswrapper[5003]: E0104 13:12:35.923377 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 13:12:46 crc kubenswrapper[5003]: I0104 13:12:46.371251 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Jan 04 13:12:46 crc kubenswrapper[5003]: I0104 13:12:46.411401 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=51.411373189 podStartE2EDuration="51.411373189s" podCreationTimestamp="2026-01-04 13:11:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:12:33.161955716 +0000 UTC m=+5068.634985597" watchObservedRunningTime="2026-01-04 13:12:46.411373189 +0000 UTC m=+5081.884403070"
Jan 04 13:12:47 crc kubenswrapper[5003]: I0104 13:12:47.193410 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Jan 04 13:12:50 crc kubenswrapper[5003]: I0104 13:12:50.807209 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516"
Jan 04 13:12:50 crc kubenswrapper[5003]: E0104 13:12:50.808238 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.229200 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-699964fbc-8crc2"]
Jan 04 13:12:52 crc kubenswrapper[5003]: E0104 13:12:52.229609 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a464e30-2e05-4592-940e-5335bb92b086" containerName="mariadb-account-create-update"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.229625 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a464e30-2e05-4592-940e-5335bb92b086" containerName="mariadb-account-create-update"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.229806 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a464e30-2e05-4592-940e-5335bb92b086" containerName="mariadb-account-create-update"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.230842 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.241035 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqtzf\" (UniqueName: \"kubernetes.io/projected/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-kube-api-access-jqtzf\") pod \"dnsmasq-dns-699964fbc-8crc2\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") " pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.241096 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-config\") pod \"dnsmasq-dns-699964fbc-8crc2\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") " pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.241131 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-dns-svc\") pod \"dnsmasq-dns-699964fbc-8crc2\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") " pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.250871 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-8crc2"]
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.343107 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-dns-svc\") pod \"dnsmasq-dns-699964fbc-8crc2\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") " pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.343457 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqtzf\" (UniqueName: \"kubernetes.io/projected/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-kube-api-access-jqtzf\") pod \"dnsmasq-dns-699964fbc-8crc2\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") " pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.343497 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-config\") pod \"dnsmasq-dns-699964fbc-8crc2\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") " pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.344370 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-config\") pod \"dnsmasq-dns-699964fbc-8crc2\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") " pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.344905 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-dns-svc\") pod \"dnsmasq-dns-699964fbc-8crc2\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") " pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.363580 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqtzf\" (UniqueName: \"kubernetes.io/projected/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-kube-api-access-jqtzf\") pod \"dnsmasq-dns-699964fbc-8crc2\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") " pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.554341 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.965953 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-8crc2"]
Jan 04 13:12:52 crc kubenswrapper[5003]: I0104 13:12:52.984562 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 04 13:12:53 crc kubenswrapper[5003]: I0104 13:12:53.323672 5003 generic.go:334] "Generic (PLEG): container finished" podID="f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" containerID="2c14c22ab7c22e0811e7b05d28682e63637854b78491edd0277a9fafc181d924" exitCode=0
Jan 04 13:12:53 crc kubenswrapper[5003]: I0104 13:12:53.323769 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-8crc2" event={"ID":"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a","Type":"ContainerDied","Data":"2c14c22ab7c22e0811e7b05d28682e63637854b78491edd0277a9fafc181d924"}
Jan 04 13:12:53 crc kubenswrapper[5003]: I0104 13:12:53.323994 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-8crc2" event={"ID":"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a","Type":"ContainerStarted","Data":"444f1f7f729e62a73e13a81d5c8bd6052fe7b19e98060c5757e62c983fff7225"}
Jan 04 13:12:53 crc kubenswrapper[5003]: I0104 13:12:53.732053 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Jan 04 13:12:54 crc kubenswrapper[5003]: I0104 13:12:54.337628 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-8crc2" event={"ID":"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a","Type":"ContainerStarted","Data":"66ea08ebf9f39246682f7bda7683758aaec8ec1db5ab0c4096ccaff0a4b77f4e"}
Jan 04 13:12:54 crc kubenswrapper[5003]: I0104 13:12:54.340331 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:12:54 crc kubenswrapper[5003]: I0104 13:12:54.362497 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-699964fbc-8crc2" podStartSLOduration=2.362479853 podStartE2EDuration="2.362479853s" podCreationTimestamp="2026-01-04 13:12:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:12:54.361772144 +0000 UTC m=+5089.834801985" watchObservedRunningTime="2026-01-04 13:12:54.362479853 +0000 UTC m=+5089.835509694"
Jan 04 13:12:56 crc kubenswrapper[5003]: I0104 13:12:56.744506 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="3a5ca80d-195f-4290-8db3-79ebbe58387c" containerName="rabbitmq" containerID="cri-o://dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6" gracePeriod=604797
Jan 04 13:12:57 crc kubenswrapper[5003]: I0104 13:12:57.190974 5003 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="3a5ca80d-195f-4290-8db3-79ebbe58387c" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.246:5671: connect: connection refused"
Jan 04 13:12:57 crc kubenswrapper[5003]: I0104 13:12:57.782799 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="891e210d-2fe4-4439-905f-e36b8e427eb8" containerName="rabbitmq" containerID="cri-o://29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7" gracePeriod=604796
Jan 04 13:13:02 crc kubenswrapper[5003]: I0104 13:13:02.556309 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:13:02 crc kubenswrapper[5003]: I0104 13:13:02.617709 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-72qsj"]
Jan 04 13:13:02 crc kubenswrapper[5003]: I0104 13:13:02.618150 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" podUID="330d5594-4379-4cb8-80af-0af1c0ff29e8" containerName="dnsmasq-dns" containerID="cri-o://2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf" gracePeriod=10
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.075089 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.154308 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzdtv\" (UniqueName: \"kubernetes.io/projected/330d5594-4379-4cb8-80af-0af1c0ff29e8-kube-api-access-jzdtv\") pod \"330d5594-4379-4cb8-80af-0af1c0ff29e8\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.154361 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-config\") pod \"330d5594-4379-4cb8-80af-0af1c0ff29e8\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.154399 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-dns-svc\") pod \"330d5594-4379-4cb8-80af-0af1c0ff29e8\" (UID: \"330d5594-4379-4cb8-80af-0af1c0ff29e8\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.165394 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/330d5594-4379-4cb8-80af-0af1c0ff29e8-kube-api-access-jzdtv" (OuterVolumeSpecName: "kube-api-access-jzdtv") pod "330d5594-4379-4cb8-80af-0af1c0ff29e8" (UID: "330d5594-4379-4cb8-80af-0af1c0ff29e8"). InnerVolumeSpecName "kube-api-access-jzdtv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.200144 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-config" (OuterVolumeSpecName: "config") pod "330d5594-4379-4cb8-80af-0af1c0ff29e8" (UID: "330d5594-4379-4cb8-80af-0af1c0ff29e8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.207161 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "330d5594-4379-4cb8-80af-0af1c0ff29e8" (UID: "330d5594-4379-4cb8-80af-0af1c0ff29e8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.254667 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.256345 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzdtv\" (UniqueName: \"kubernetes.io/projected/330d5594-4379-4cb8-80af-0af1c0ff29e8-kube-api-access-jzdtv\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.256373 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-config\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.256384 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/330d5594-4379-4cb8-80af-0af1c0ff29e8-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.358005 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\") pod \"3a5ca80d-195f-4290-8db3-79ebbe58387c\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.358078 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-confd\") pod \"3a5ca80d-195f-4290-8db3-79ebbe58387c\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.358132 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9frxp\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-kube-api-access-9frxp\") pod \"3a5ca80d-195f-4290-8db3-79ebbe58387c\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.358171 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-plugins-conf\") pod \"3a5ca80d-195f-4290-8db3-79ebbe58387c\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.358196 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-erlang-cookie\") pod \"3a5ca80d-195f-4290-8db3-79ebbe58387c\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.358253 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-server-conf\") pod \"3a5ca80d-195f-4290-8db3-79ebbe58387c\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.358292 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-tls\") pod \"3a5ca80d-195f-4290-8db3-79ebbe58387c\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.358325 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-plugins\") pod \"3a5ca80d-195f-4290-8db3-79ebbe58387c\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.358371 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-config-data\") pod \"3a5ca80d-195f-4290-8db3-79ebbe58387c\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.358393 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3a5ca80d-195f-4290-8db3-79ebbe58387c-pod-info\") pod \"3a5ca80d-195f-4290-8db3-79ebbe58387c\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.358415 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3a5ca80d-195f-4290-8db3-79ebbe58387c-erlang-cookie-secret\") pod \"3a5ca80d-195f-4290-8db3-79ebbe58387c\" (UID: \"3a5ca80d-195f-4290-8db3-79ebbe58387c\") "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.359192 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "3a5ca80d-195f-4290-8db3-79ebbe58387c" (UID: "3a5ca80d-195f-4290-8db3-79ebbe58387c"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.362436 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "3a5ca80d-195f-4290-8db3-79ebbe58387c" (UID: "3a5ca80d-195f-4290-8db3-79ebbe58387c"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.363033 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "3a5ca80d-195f-4290-8db3-79ebbe58387c" (UID: "3a5ca80d-195f-4290-8db3-79ebbe58387c"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.365191 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "3a5ca80d-195f-4290-8db3-79ebbe58387c" (UID: "3a5ca80d-195f-4290-8db3-79ebbe58387c"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.365578 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a5ca80d-195f-4290-8db3-79ebbe58387c-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "3a5ca80d-195f-4290-8db3-79ebbe58387c" (UID: "3a5ca80d-195f-4290-8db3-79ebbe58387c"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.366849 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/3a5ca80d-195f-4290-8db3-79ebbe58387c-pod-info" (OuterVolumeSpecName: "pod-info") pod "3a5ca80d-195f-4290-8db3-79ebbe58387c" (UID: "3a5ca80d-195f-4290-8db3-79ebbe58387c"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.369722 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-kube-api-access-9frxp" (OuterVolumeSpecName: "kube-api-access-9frxp") pod "3a5ca80d-195f-4290-8db3-79ebbe58387c" (UID: "3a5ca80d-195f-4290-8db3-79ebbe58387c"). InnerVolumeSpecName "kube-api-access-9frxp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.378656 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97" (OuterVolumeSpecName: "persistence") pod "3a5ca80d-195f-4290-8db3-79ebbe58387c" (UID: "3a5ca80d-195f-4290-8db3-79ebbe58387c"). InnerVolumeSpecName "pvc-352d767c-80b9-450f-952a-c5d59cfb3e97". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.379175 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-config-data" (OuterVolumeSpecName: "config-data") pod "3a5ca80d-195f-4290-8db3-79ebbe58387c" (UID: "3a5ca80d-195f-4290-8db3-79ebbe58387c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.408572 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-server-conf" (OuterVolumeSpecName: "server-conf") pod "3a5ca80d-195f-4290-8db3-79ebbe58387c" (UID: "3a5ca80d-195f-4290-8db3-79ebbe58387c"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.432483 5003 generic.go:334] "Generic (PLEG): container finished" podID="330d5594-4379-4cb8-80af-0af1c0ff29e8" containerID="2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf" exitCode=0
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.432547 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" event={"ID":"330d5594-4379-4cb8-80af-0af1c0ff29e8","Type":"ContainerDied","Data":"2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf"}
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.432576 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj" event={"ID":"330d5594-4379-4cb8-80af-0af1c0ff29e8","Type":"ContainerDied","Data":"df8abc7270dbb53778dc819b87ca09da67b55049b76b24f0ea3fa28f21a2ba3b"}
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.432593 5003 scope.go:117] "RemoveContainer" containerID="2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.432730 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d79f765b5-72qsj"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.455353 5003 generic.go:334] "Generic (PLEG): container finished" podID="3a5ca80d-195f-4290-8db3-79ebbe58387c" containerID="dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6" exitCode=0
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.455413 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3a5ca80d-195f-4290-8db3-79ebbe58387c","Type":"ContainerDied","Data":"dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6"}
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.455450 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"3a5ca80d-195f-4290-8db3-79ebbe58387c","Type":"ContainerDied","Data":"29edd86384a1c0b5e7a569210efa9efc107b4461d83045399ae946d5f9d88848"}
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.455454 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.459267 5003 scope.go:117] "RemoveContainer" containerID="94f0fe0081cc4f496813295eeea943deebc36c72838bfe88ad923b7793573448"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.459335 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "3a5ca80d-195f-4290-8db3-79ebbe58387c" (UID: "3a5ca80d-195f-4290-8db3-79ebbe58387c"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.460288 5003 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-plugins-conf\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.460319 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.460331 5003 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-server-conf\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.460340 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.460349 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.460358 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3a5ca80d-195f-4290-8db3-79ebbe58387c-config-data\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.460365 5003 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3a5ca80d-195f-4290-8db3-79ebbe58387c-pod-info\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.460376 5003 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3a5ca80d-195f-4290-8db3-79ebbe58387c-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.460408 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\") on node \"crc\" "
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.460420 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.460429 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9frxp\" (UniqueName: \"kubernetes.io/projected/3a5ca80d-195f-4290-8db3-79ebbe58387c-kube-api-access-9frxp\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.488122 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-72qsj"]
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.502862 5003 scope.go:117] "RemoveContainer" containerID="2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf"
Jan 04 13:13:03 crc kubenswrapper[5003]: E0104 13:13:03.504393 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf\": container with ID starting with 2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf not found: ID does not exist" containerID="2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.504432 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf"} err="failed to get container status \"2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf\": rpc error: code = NotFound desc = could not find container \"2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf\": container with ID starting with 2078f607e923af61374a8be1a095eb75403c65fb3851f4b2f73e9a465877aedf not found: ID does not exist"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.504455 5003 scope.go:117] "RemoveContainer" containerID="94f0fe0081cc4f496813295eeea943deebc36c72838bfe88ad923b7793573448"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.505659 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d79f765b5-72qsj"]
Jan 04 13:13:03 crc kubenswrapper[5003]: E0104 13:13:03.507299 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94f0fe0081cc4f496813295eeea943deebc36c72838bfe88ad923b7793573448\": container with ID starting with 94f0fe0081cc4f496813295eeea943deebc36c72838bfe88ad923b7793573448 not found: ID does not exist" containerID="94f0fe0081cc4f496813295eeea943deebc36c72838bfe88ad923b7793573448"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.507327 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94f0fe0081cc4f496813295eeea943deebc36c72838bfe88ad923b7793573448"} err="failed to get container status \"94f0fe0081cc4f496813295eeea943deebc36c72838bfe88ad923b7793573448\": rpc error: code = NotFound desc = could not find container \"94f0fe0081cc4f496813295eeea943deebc36c72838bfe88ad923b7793573448\": container with ID starting with 94f0fe0081cc4f496813295eeea943deebc36c72838bfe88ad923b7793573448 not found: ID does not exist"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.507343 5003 scope.go:117] "RemoveContainer" containerID="dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.532226 5003 scope.go:117] "RemoveContainer" containerID="3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.533639 5003 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.534289 5003 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-352d767c-80b9-450f-952a-c5d59cfb3e97" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97") on node "crc"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.558894 5003 scope.go:117] "RemoveContainer" containerID="dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6"
Jan 04 13:13:03 crc kubenswrapper[5003]: E0104 13:13:03.560388 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6\": container with ID starting with dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6 not found: ID does not exist" containerID="dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.560727 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6"} err="failed to get container status \"dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6\": rpc error: code = NotFound desc = could not find container \"dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6\": container with ID starting with dd83f353f2c4796a7723dfb7f6f42205c50f816ab15f07979d2b8e95b8fda0a6 not found: ID does not exist"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.560756 5003 scope.go:117] "RemoveContainer" containerID="3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.561960 5003 reconciler_common.go:293] "Volume detached for volume \"pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\") on node \"crc\" DevicePath \"\""
Jan 04 13:13:03 crc kubenswrapper[5003]: E0104 13:13:03.565081 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937\": container with ID starting with 3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937 not found: ID does not exist" containerID="3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.565112 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937"} err="failed to get container status \"3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937\": rpc error: code = NotFound desc = could not find container \"3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937\": container with ID starting with 3cb5a4fb779d57b95b81bef8e28f5af91419fa60622983e5e7d1178cb874f937 not found: ID does not exist"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.857519 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.862643 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.884200 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 04 13:13:03 crc kubenswrapper[5003]: E0104 13:13:03.884815 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="330d5594-4379-4cb8-80af-0af1c0ff29e8" containerName="dnsmasq-dns"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.884902 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="330d5594-4379-4cb8-80af-0af1c0ff29e8" containerName="dnsmasq-dns"
Jan 04 13:13:03 crc kubenswrapper[5003]: E0104 13:13:03.884975 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="330d5594-4379-4cb8-80af-0af1c0ff29e8" containerName="init"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.885110 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="330d5594-4379-4cb8-80af-0af1c0ff29e8" containerName="init"
Jan 04 13:13:03 crc kubenswrapper[5003]: E0104 13:13:03.885187 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a5ca80d-195f-4290-8db3-79ebbe58387c" containerName="setup-container"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.885264 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a5ca80d-195f-4290-8db3-79ebbe58387c" containerName="setup-container"
Jan 04 13:13:03 crc kubenswrapper[5003]: E0104 13:13:03.885331 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a5ca80d-195f-4290-8db3-79ebbe58387c" containerName="rabbitmq"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.885392 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a5ca80d-195f-4290-8db3-79ebbe58387c" containerName="rabbitmq"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.885611 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="330d5594-4379-4cb8-80af-0af1c0ff29e8" containerName="dnsmasq-dns"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.885729 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a5ca80d-195f-4290-8db3-79ebbe58387c" containerName="rabbitmq"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.886645 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.889884 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.889988 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.890007 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.891419 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.891511 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.891554 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.891515 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-h58fc"
Jan 04 13:13:03 crc kubenswrapper[5003]: I0104 13:13:03.918414 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.073235 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.073296 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.073333 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkbnd\" (UniqueName: \"kubernetes.io/projected/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-kube-api-access-nkbnd\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.073398 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.073430 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-config-data\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.073472 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.073496 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.073563 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.073586 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.073605 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.073623 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.175481 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.175539 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.175571 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.175592 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.175610 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.175631 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.175688 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.175706 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.175724 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkbnd\" (UniqueName: \"kubernetes.io/projected/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-kube-api-access-nkbnd\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.175761 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.175777 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-config-data\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.177322 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-config-data\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.177981 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.178056 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.178113 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.178183 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.179668 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.179708 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d6efa95f8d59fc20d38a71435bb191e9af886aae1d88deb62d6e46d114b2a718/globalmount\"" pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.183389 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.183568 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.183940 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.196517 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkbnd\" (UniqueName: \"kubernetes.io/projected/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-kube-api-access-nkbnd\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0"
Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.200841 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8934ee0c-1985-4bc1-95cb-50fcbbf7909d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID:
\"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.214451 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-352d767c-80b9-450f-952a-c5d59cfb3e97\") pod \"rabbitmq-server-0\" (UID: \"8934ee0c-1985-4bc1-95cb-50fcbbf7909d\") " pod="openstack/rabbitmq-server-0" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.273186 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.327423 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.473608 5003 generic.go:334] "Generic (PLEG): container finished" podID="891e210d-2fe4-4439-905f-e36b8e427eb8" containerID="29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7" exitCode=0 Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.473660 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"891e210d-2fe4-4439-905f-e36b8e427eb8","Type":"ContainerDied","Data":"29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7"} Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.473697 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"891e210d-2fe4-4439-905f-e36b8e427eb8","Type":"ContainerDied","Data":"2886769705e62f3525a4e0cfe377fe2fafe154d166a25621c8f0d6efa512de25"} Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.473721 5003 scope.go:117] "RemoveContainer" containerID="29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.473777 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.479896 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/891e210d-2fe4-4439-905f-e36b8e427eb8-erlang-cookie-secret\") pod \"891e210d-2fe4-4439-905f-e36b8e427eb8\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.479933 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-server-conf\") pod \"891e210d-2fe4-4439-905f-e36b8e427eb8\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.480000 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-tls\") pod \"891e210d-2fe4-4439-905f-e36b8e427eb8\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.480057 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdxfl\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-kube-api-access-cdxfl\") pod \"891e210d-2fe4-4439-905f-e36b8e427eb8\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.480114 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-plugins-conf\") pod \"891e210d-2fe4-4439-905f-e36b8e427eb8\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.480166 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-plugins\") pod \"891e210d-2fe4-4439-905f-e36b8e427eb8\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.480192 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-confd\") pod \"891e210d-2fe4-4439-905f-e36b8e427eb8\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.480240 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/891e210d-2fe4-4439-905f-e36b8e427eb8-pod-info\") pod \"891e210d-2fe4-4439-905f-e36b8e427eb8\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.480260 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-config-data\") pod \"891e210d-2fe4-4439-905f-e36b8e427eb8\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.480372 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\") pod 
\"891e210d-2fe4-4439-905f-e36b8e427eb8\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.480408 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-erlang-cookie\") pod \"891e210d-2fe4-4439-905f-e36b8e427eb8\" (UID: \"891e210d-2fe4-4439-905f-e36b8e427eb8\") " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.481111 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "891e210d-2fe4-4439-905f-e36b8e427eb8" (UID: "891e210d-2fe4-4439-905f-e36b8e427eb8"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.483158 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "891e210d-2fe4-4439-905f-e36b8e427eb8" (UID: "891e210d-2fe4-4439-905f-e36b8e427eb8"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.483573 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "891e210d-2fe4-4439-905f-e36b8e427eb8" (UID: "891e210d-2fe4-4439-905f-e36b8e427eb8"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.490304 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-kube-api-access-cdxfl" (OuterVolumeSpecName: "kube-api-access-cdxfl") pod "891e210d-2fe4-4439-905f-e36b8e427eb8" (UID: "891e210d-2fe4-4439-905f-e36b8e427eb8"). InnerVolumeSpecName "kube-api-access-cdxfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.493701 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/891e210d-2fe4-4439-905f-e36b8e427eb8-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "891e210d-2fe4-4439-905f-e36b8e427eb8" (UID: "891e210d-2fe4-4439-905f-e36b8e427eb8"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.493910 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "891e210d-2fe4-4439-905f-e36b8e427eb8" (UID: "891e210d-2fe4-4439-905f-e36b8e427eb8"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.497707 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/891e210d-2fe4-4439-905f-e36b8e427eb8-pod-info" (OuterVolumeSpecName: "pod-info") pod "891e210d-2fe4-4439-905f-e36b8e427eb8" (UID: "891e210d-2fe4-4439-905f-e36b8e427eb8"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.502232 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-config-data" (OuterVolumeSpecName: "config-data") pod "891e210d-2fe4-4439-905f-e36b8e427eb8" (UID: "891e210d-2fe4-4439-905f-e36b8e427eb8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.503339 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1" (OuterVolumeSpecName: "persistence") pod "891e210d-2fe4-4439-905f-e36b8e427eb8" (UID: "891e210d-2fe4-4439-905f-e36b8e427eb8"). InnerVolumeSpecName "pvc-5b85a27d-231a-48e1-b725-5f24c41608d1". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.505181 5003 scope.go:117] "RemoveContainer" containerID="786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.520158 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-server-conf" (OuterVolumeSpecName: "server-conf") pod "891e210d-2fe4-4439-905f-e36b8e427eb8" (UID: "891e210d-2fe4-4439-905f-e36b8e427eb8"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.523303 5003 scope.go:117] "RemoveContainer" containerID="29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7" Jan 04 13:13:04 crc kubenswrapper[5003]: E0104 13:13:04.523887 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7\": container with ID starting with 29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7 not found: ID does not exist" containerID="29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.523945 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7"} err="failed to get container status \"29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7\": rpc error: code = NotFound desc = could not find container \"29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7\": container with ID starting with 29d402f6df851cff501dd417cf5fa58a98739de4a58e3c08d0051981ce64a6f7 not found: ID does not exist" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.523983 5003 scope.go:117] "RemoveContainer" containerID="786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574" Jan 04 13:13:04 crc kubenswrapper[5003]: E0104 13:13:04.524429 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574\": container with ID starting with 786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574 not found: ID does not exist" containerID="786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.524464 5003 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574"} err="failed to get container status \"786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574\": rpc error: code = NotFound desc = could not find container \"786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574\": container with ID starting with 786a99417986cd76ae457dd06385e14e3d543a53f77fc61005746017eaf7d574 not found: ID does not exist" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.556336 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "891e210d-2fe4-4439-905f-e36b8e427eb8" (UID: "891e210d-2fe4-4439-905f-e36b8e427eb8"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.582335 5003 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.582370 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.582380 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.582392 5003 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/891e210d-2fe4-4439-905f-e36b8e427eb8-pod-info\") on node \"crc\" DevicePath \"\"" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.582401 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.582441 5003 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\") on node \"crc\" " Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.582452 5003 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.582462 5003 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/891e210d-2fe4-4439-905f-e36b8e427eb8-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.582472 5003 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/891e210d-2fe4-4439-905f-e36b8e427eb8-server-conf\") on node \"crc\" DevicePath \"\"" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.582480 5003 reconciler_common.go:293] "Volume detached for 
volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.582488 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdxfl\" (UniqueName: \"kubernetes.io/projected/891e210d-2fe4-4439-905f-e36b8e427eb8-kube-api-access-cdxfl\") on node \"crc\" DevicePath \"\"" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.598542 5003 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.598794 5003 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-5b85a27d-231a-48e1-b725-5f24c41608d1" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1") on node "crc" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.684280 5003 reconciler_common.go:293] "Volume detached for volume \"pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\") on node \"crc\" DevicePath \"\"" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.769653 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Jan 04 13:13:04 crc kubenswrapper[5003]: W0104 13:13:04.782701 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8934ee0c_1985_4bc1_95cb_50fcbbf7909d.slice/crio-c393a2c2b05e1407749fadd217369062edd1c7d768fba3a1cb6946aafab8835c WatchSource:0}: Error finding container c393a2c2b05e1407749fadd217369062edd1c7d768fba3a1cb6946aafab8835c: Status 404 returned error can't find the container with id c393a2c2b05e1407749fadd217369062edd1c7d768fba3a1cb6946aafab8835c Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.834901 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="330d5594-4379-4cb8-80af-0af1c0ff29e8" path="/var/lib/kubelet/pods/330d5594-4379-4cb8-80af-0af1c0ff29e8/volumes" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.837200 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a5ca80d-195f-4290-8db3-79ebbe58387c" path="/var/lib/kubelet/pods/3a5ca80d-195f-4290-8db3-79ebbe58387c/volumes" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.918155 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.937224 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.949814 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 13:13:04 crc kubenswrapper[5003]: E0104 13:13:04.950236 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="891e210d-2fe4-4439-905f-e36b8e427eb8" containerName="rabbitmq" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.950255 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="891e210d-2fe4-4439-905f-e36b8e427eb8" containerName="rabbitmq" Jan 04 13:13:04 crc kubenswrapper[5003]: E0104 13:13:04.950285 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="891e210d-2fe4-4439-905f-e36b8e427eb8" containerName="setup-container" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.950293 
5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="891e210d-2fe4-4439-905f-e36b8e427eb8" containerName="setup-container" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.950457 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="891e210d-2fe4-4439-905f-e36b8e427eb8" containerName="rabbitmq" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.951316 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.954670 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.954962 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.955296 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.955622 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.956029 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.956071 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-wllz4" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.956532 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Jan 04 13:13:04 crc kubenswrapper[5003]: I0104 13:13:04.957876 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.093878 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvjj9\" (UniqueName: \"kubernetes.io/projected/006934d0-3792-4855-ae29-d1d336d53937-kube-api-access-bvjj9\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.093979 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/006934d0-3792-4855-ae29-d1d336d53937-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.094055 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.094148 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/006934d0-3792-4855-ae29-d1d336d53937-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.094260 5003 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/006934d0-3792-4855-ae29-d1d336d53937-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.094401 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.094447 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/006934d0-3792-4855-ae29-d1d336d53937-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.094476 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.094542 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/006934d0-3792-4855-ae29-d1d336d53937-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.094667 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.094725 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.195969 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvjj9\" (UniqueName: \"kubernetes.io/projected/006934d0-3792-4855-ae29-d1d336d53937-kube-api-access-bvjj9\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.196038 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/006934d0-3792-4855-ae29-d1d336d53937-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc 
kubenswrapper[5003]: I0104 13:13:05.196070 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.196098 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/006934d0-3792-4855-ae29-d1d336d53937-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.196124 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/006934d0-3792-4855-ae29-d1d336d53937-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.196163 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.196187 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/006934d0-3792-4855-ae29-d1d336d53937-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.196212 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.196232 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/006934d0-3792-4855-ae29-d1d336d53937-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.196276 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.196298 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.197214 5003 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.197545 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/006934d0-3792-4855-ae29-d1d336d53937-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.197622 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/006934d0-3792-4855-ae29-d1d336d53937-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.197925 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.198144 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/006934d0-3792-4855-ae29-d1d336d53937-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.198834 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.198882 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6c04eb50fedee20aea88220143cc7b3f6050cc4ad553ccffe048ae9026fa180c/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.201097 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/006934d0-3792-4855-ae29-d1d336d53937-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.203837 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/006934d0-3792-4855-ae29-d1d336d53937-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.203946 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.207353 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/006934d0-3792-4855-ae29-d1d336d53937-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.226231 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvjj9\" (UniqueName: \"kubernetes.io/projected/006934d0-3792-4855-ae29-d1d336d53937-kube-api-access-bvjj9\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.235194 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5b85a27d-231a-48e1-b725-5f24c41608d1\") pod \"rabbitmq-cell1-server-0\" (UID: \"006934d0-3792-4855-ae29-d1d336d53937\") " pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.315229 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.486384 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8934ee0c-1985-4bc1-95cb-50fcbbf7909d","Type":"ContainerStarted","Data":"c393a2c2b05e1407749fadd217369062edd1c7d768fba3a1cb6946aafab8835c"} Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.551630 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Jan 04 13:13:05 crc kubenswrapper[5003]: W0104 13:13:05.559638 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod006934d0_3792_4855_ae29_d1d336d53937.slice/crio-93063c624429f87bdcb03d64c5e86aff9c10a17dc5aa3ee271c570f66389e1cb WatchSource:0}: Error finding container 93063c624429f87bdcb03d64c5e86aff9c10a17dc5aa3ee271c570f66389e1cb: Status 404 returned error can't find the container with id 93063c624429f87bdcb03d64c5e86aff9c10a17dc5aa3ee271c570f66389e1cb Jan 04 13:13:05 crc kubenswrapper[5003]: I0104 13:13:05.807157 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:13:05 crc kubenswrapper[5003]: E0104 13:13:05.807470 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:13:06 crc kubenswrapper[5003]: I0104 13:13:06.497365 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8934ee0c-1985-4bc1-95cb-50fcbbf7909d","Type":"ContainerStarted","Data":"d39d165d62f7283b3bf7133c6257ca50d44d0c8134a0e04abaa061728fd5b4fc"} Jan 04 13:13:06 crc kubenswrapper[5003]: I0104 13:13:06.498804 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"006934d0-3792-4855-ae29-d1d336d53937","Type":"ContainerStarted","Data":"93063c624429f87bdcb03d64c5e86aff9c10a17dc5aa3ee271c570f66389e1cb"} Jan 04 13:13:06 crc kubenswrapper[5003]: I0104 13:13:06.822744 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="891e210d-2fe4-4439-905f-e36b8e427eb8" path="/var/lib/kubelet/pods/891e210d-2fe4-4439-905f-e36b8e427eb8/volumes" Jan 04 13:13:07 crc kubenswrapper[5003]: I0104 13:13:07.509905 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"006934d0-3792-4855-ae29-d1d336d53937","Type":"ContainerStarted","Data":"aa79f67af1a3e9adeca58c755889eefdc9aed797382d971ff5238c75980336c6"} Jan 04 13:13:16 crc kubenswrapper[5003]: I0104 13:13:16.807121 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:13:16 crc kubenswrapper[5003]: E0104 13:13:16.807862 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" 
podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:13:29 crc kubenswrapper[5003]: I0104 13:13:29.807597 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:13:29 crc kubenswrapper[5003]: E0104 13:13:29.808736 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:13:39 crc kubenswrapper[5003]: I0104 13:13:39.810341 5003 generic.go:334] "Generic (PLEG): container finished" podID="8934ee0c-1985-4bc1-95cb-50fcbbf7909d" containerID="d39d165d62f7283b3bf7133c6257ca50d44d0c8134a0e04abaa061728fd5b4fc" exitCode=0 Jan 04 13:13:39 crc kubenswrapper[5003]: I0104 13:13:39.810444 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8934ee0c-1985-4bc1-95cb-50fcbbf7909d","Type":"ContainerDied","Data":"d39d165d62f7283b3bf7133c6257ca50d44d0c8134a0e04abaa061728fd5b4fc"} Jan 04 13:13:40 crc kubenswrapper[5003]: I0104 13:13:40.826206 5003 generic.go:334] "Generic (PLEG): container finished" podID="006934d0-3792-4855-ae29-d1d336d53937" containerID="aa79f67af1a3e9adeca58c755889eefdc9aed797382d971ff5238c75980336c6" exitCode=0 Jan 04 13:13:40 crc kubenswrapper[5003]: I0104 13:13:40.826368 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"006934d0-3792-4855-ae29-d1d336d53937","Type":"ContainerDied","Data":"aa79f67af1a3e9adeca58c755889eefdc9aed797382d971ff5238c75980336c6"} Jan 04 13:13:40 crc kubenswrapper[5003]: I0104 13:13:40.830647 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8934ee0c-1985-4bc1-95cb-50fcbbf7909d","Type":"ContainerStarted","Data":"6da3a5254b77d7e0dbb70c4489ea640eaf5efc47c4ef9bc6d46118e887c69c5a"} Jan 04 13:13:40 crc kubenswrapper[5003]: I0104 13:13:40.831092 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Jan 04 13:13:40 crc kubenswrapper[5003]: I0104 13:13:40.898680 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.898641461 podStartE2EDuration="37.898641461s" podCreationTimestamp="2026-01-04 13:13:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:13:40.88916678 +0000 UTC m=+5136.362196641" watchObservedRunningTime="2026-01-04 13:13:40.898641461 +0000 UTC m=+5136.371671332" Jan 04 13:13:41 crc kubenswrapper[5003]: I0104 13:13:41.806735 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:13:41 crc kubenswrapper[5003]: E0104 13:13:41.807374 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 
13:13:41 crc kubenswrapper[5003]: I0104 13:13:41.840979 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"006934d0-3792-4855-ae29-d1d336d53937","Type":"ContainerStarted","Data":"0822cea0616d9bdc87eab1c76ff215034a88c4ff7de34b0f649e407675949c48"} Jan 04 13:13:41 crc kubenswrapper[5003]: I0104 13:13:41.841264 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:41 crc kubenswrapper[5003]: I0104 13:13:41.883287 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.883269028 podStartE2EDuration="37.883269028s" podCreationTimestamp="2026-01-04 13:13:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:13:41.876170339 +0000 UTC m=+5137.349200180" watchObservedRunningTime="2026-01-04 13:13:41.883269028 +0000 UTC m=+5137.356298889" Jan 04 13:13:54 crc kubenswrapper[5003]: I0104 13:13:54.277279 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Jan 04 13:13:55 crc kubenswrapper[5003]: I0104 13:13:55.319229 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Jan 04 13:13:56 crc kubenswrapper[5003]: I0104 13:13:56.807547 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:13:56 crc kubenswrapper[5003]: E0104 13:13:56.807846 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:14:00 crc kubenswrapper[5003]: I0104 13:14:00.903047 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"] Jan 04 13:14:00 crc kubenswrapper[5003]: I0104 13:14:00.906401 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Jan 04 13:14:00 crc kubenswrapper[5003]: I0104 13:14:00.910125 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bg47z" Jan 04 13:14:00 crc kubenswrapper[5003]: I0104 13:14:00.916005 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Jan 04 13:14:01 crc kubenswrapper[5003]: I0104 13:14:01.004733 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9bnz\" (UniqueName: \"kubernetes.io/projected/3854eb8b-067f-4f3b-9a58-269192da304e-kube-api-access-m9bnz\") pod \"mariadb-client-1-default\" (UID: \"3854eb8b-067f-4f3b-9a58-269192da304e\") " pod="openstack/mariadb-client-1-default" Jan 04 13:14:01 crc kubenswrapper[5003]: I0104 13:14:01.107252 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9bnz\" (UniqueName: \"kubernetes.io/projected/3854eb8b-067f-4f3b-9a58-269192da304e-kube-api-access-m9bnz\") pod \"mariadb-client-1-default\" (UID: \"3854eb8b-067f-4f3b-9a58-269192da304e\") " pod="openstack/mariadb-client-1-default" Jan 04 13:14:01 crc kubenswrapper[5003]: I0104 13:14:01.143776 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9bnz\" (UniqueName: \"kubernetes.io/projected/3854eb8b-067f-4f3b-9a58-269192da304e-kube-api-access-m9bnz\") pod \"mariadb-client-1-default\" (UID: \"3854eb8b-067f-4f3b-9a58-269192da304e\") " pod="openstack/mariadb-client-1-default" Jan 04 13:14:01 crc kubenswrapper[5003]: I0104 13:14:01.243161 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Jan 04 13:14:01 crc kubenswrapper[5003]: I0104 13:14:01.836307 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Jan 04 13:14:01 crc kubenswrapper[5003]: W0104 13:14:01.846169 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3854eb8b_067f_4f3b_9a58_269192da304e.slice/crio-3ba65c67c8815b6f68ad9e8fe168c4873a1d213e5b24414bcb82ee1e85fa241c WatchSource:0}: Error finding container 3ba65c67c8815b6f68ad9e8fe168c4873a1d213e5b24414bcb82ee1e85fa241c: Status 404 returned error can't find the container with id 3ba65c67c8815b6f68ad9e8fe168c4873a1d213e5b24414bcb82ee1e85fa241c Jan 04 13:14:02 crc kubenswrapper[5003]: I0104 13:14:02.010677 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"3854eb8b-067f-4f3b-9a58-269192da304e","Type":"ContainerStarted","Data":"3ba65c67c8815b6f68ad9e8fe168c4873a1d213e5b24414bcb82ee1e85fa241c"} Jan 04 13:14:03 crc kubenswrapper[5003]: I0104 13:14:03.021241 5003 generic.go:334] "Generic (PLEG): container finished" podID="3854eb8b-067f-4f3b-9a58-269192da304e" containerID="e300cc1143ad91098ba62f6572ebd21dd04ddd2f6120344bbb314a6935b12a60" exitCode=0 Jan 04 13:14:03 crc kubenswrapper[5003]: I0104 13:14:03.021720 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"3854eb8b-067f-4f3b-9a58-269192da304e","Type":"ContainerDied","Data":"e300cc1143ad91098ba62f6572ebd21dd04ddd2f6120344bbb314a6935b12a60"} Jan 04 13:14:04 crc kubenswrapper[5003]: I0104 13:14:04.414126 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Jan 04 13:14:04 crc kubenswrapper[5003]: I0104 13:14:04.441985 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_3854eb8b-067f-4f3b-9a58-269192da304e/mariadb-client-1-default/0.log" Jan 04 13:14:04 crc kubenswrapper[5003]: I0104 13:14:04.467998 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9bnz\" (UniqueName: \"kubernetes.io/projected/3854eb8b-067f-4f3b-9a58-269192da304e-kube-api-access-m9bnz\") pod \"3854eb8b-067f-4f3b-9a58-269192da304e\" (UID: \"3854eb8b-067f-4f3b-9a58-269192da304e\") " Jan 04 13:14:04 crc kubenswrapper[5003]: I0104 13:14:04.478389 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3854eb8b-067f-4f3b-9a58-269192da304e-kube-api-access-m9bnz" (OuterVolumeSpecName: "kube-api-access-m9bnz") pod "3854eb8b-067f-4f3b-9a58-269192da304e" (UID: "3854eb8b-067f-4f3b-9a58-269192da304e"). InnerVolumeSpecName "kube-api-access-m9bnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:14:04 crc kubenswrapper[5003]: I0104 13:14:04.492814 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Jan 04 13:14:04 crc kubenswrapper[5003]: I0104 13:14:04.504393 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Jan 04 13:14:04 crc kubenswrapper[5003]: I0104 13:14:04.570585 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9bnz\" (UniqueName: \"kubernetes.io/projected/3854eb8b-067f-4f3b-9a58-269192da304e-kube-api-access-m9bnz\") on node \"crc\" DevicePath \"\"" Jan 04 13:14:04 crc kubenswrapper[5003]: I0104 13:14:04.818725 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3854eb8b-067f-4f3b-9a58-269192da304e" path="/var/lib/kubelet/pods/3854eb8b-067f-4f3b-9a58-269192da304e/volumes" Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.029422 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Jan 04 13:14:05 crc kubenswrapper[5003]: E0104 13:14:05.029736 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3854eb8b-067f-4f3b-9a58-269192da304e" containerName="mariadb-client-1-default" Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.029760 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="3854eb8b-067f-4f3b-9a58-269192da304e" containerName="mariadb-client-1-default" Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.029917 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="3854eb8b-067f-4f3b-9a58-269192da304e" containerName="mariadb-client-1-default" Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.030440 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.040228 5003 scope.go:117] "RemoveContainer" containerID="e300cc1143ad91098ba62f6572ebd21dd04ddd2f6120344bbb314a6935b12a60" Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.040786 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.041100 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.180800 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tf96\" (UniqueName: \"kubernetes.io/projected/f9a87e68-3b87-4cd9-9552-8076be81093e-kube-api-access-9tf96\") pod \"mariadb-client-2-default\" (UID: \"f9a87e68-3b87-4cd9-9552-8076be81093e\") " pod="openstack/mariadb-client-2-default" Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.282856 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tf96\" (UniqueName: \"kubernetes.io/projected/f9a87e68-3b87-4cd9-9552-8076be81093e-kube-api-access-9tf96\") pod \"mariadb-client-2-default\" (UID: \"f9a87e68-3b87-4cd9-9552-8076be81093e\") " pod="openstack/mariadb-client-2-default" Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.305381 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tf96\" (UniqueName: \"kubernetes.io/projected/f9a87e68-3b87-4cd9-9552-8076be81093e-kube-api-access-9tf96\") pod \"mariadb-client-2-default\" (UID: \"f9a87e68-3b87-4cd9-9552-8076be81093e\") " pod="openstack/mariadb-client-2-default" Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.415564 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Jan 04 13:14:05 crc kubenswrapper[5003]: I0104 13:14:05.923508 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Jan 04 13:14:06 crc kubenswrapper[5003]: I0104 13:14:06.047741 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"f9a87e68-3b87-4cd9-9552-8076be81093e","Type":"ContainerStarted","Data":"de9fc074100c7c0ffa2b08c00f6b5c5c70003d296545f0e3574da9aea2765327"} Jan 04 13:14:07 crc kubenswrapper[5003]: I0104 13:14:07.056603 5003 generic.go:334] "Generic (PLEG): container finished" podID="f9a87e68-3b87-4cd9-9552-8076be81093e" containerID="357247eac594aa18425aa1c27519f655642d0ca34632d7f5734691ee0393d77c" exitCode=1 Jan 04 13:14:07 crc kubenswrapper[5003]: I0104 13:14:07.056654 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"f9a87e68-3b87-4cd9-9552-8076be81093e","Type":"ContainerDied","Data":"357247eac594aa18425aa1c27519f655642d0ca34632d7f5734691ee0393d77c"} Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.071455 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"f9a87e68-3b87-4cd9-9552-8076be81093e","Type":"ContainerDied","Data":"de9fc074100c7c0ffa2b08c00f6b5c5c70003d296545f0e3574da9aea2765327"} Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.071927 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de9fc074100c7c0ffa2b08c00f6b5c5c70003d296545f0e3574da9aea2765327" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.135625 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.152980 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2-default_f9a87e68-3b87-4cd9-9552-8076be81093e/mariadb-client-2-default/0.log" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.191155 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.199604 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.252001 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tf96\" (UniqueName: \"kubernetes.io/projected/f9a87e68-3b87-4cd9-9552-8076be81093e-kube-api-access-9tf96\") pod \"f9a87e68-3b87-4cd9-9552-8076be81093e\" (UID: \"f9a87e68-3b87-4cd9-9552-8076be81093e\") " Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.258295 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9a87e68-3b87-4cd9-9552-8076be81093e-kube-api-access-9tf96" (OuterVolumeSpecName: "kube-api-access-9tf96") pod "f9a87e68-3b87-4cd9-9552-8076be81093e" (UID: "f9a87e68-3b87-4cd9-9552-8076be81093e"). InnerVolumeSpecName "kube-api-access-9tf96". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.353641 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tf96\" (UniqueName: \"kubernetes.io/projected/f9a87e68-3b87-4cd9-9552-8076be81093e-kube-api-access-9tf96\") on node \"crc\" DevicePath \"\"" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.784500 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Jan 04 13:14:09 crc kubenswrapper[5003]: E0104 13:14:09.784851 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9a87e68-3b87-4cd9-9552-8076be81093e" containerName="mariadb-client-2-default" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.784874 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9a87e68-3b87-4cd9-9552-8076be81093e" containerName="mariadb-client-2-default" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.785097 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9a87e68-3b87-4cd9-9552-8076be81093e" containerName="mariadb-client-2-default" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.785667 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.798667 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.806788 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:14:09 crc kubenswrapper[5003]: E0104 13:14:09.807101 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.863260 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5qmm\" (UniqueName: \"kubernetes.io/projected/d90155e2-48e6-40d6-a832-2d807698ad20-kube-api-access-f5qmm\") pod \"mariadb-client-1\" (UID: \"d90155e2-48e6-40d6-a832-2d807698ad20\") " pod="openstack/mariadb-client-1" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.965406 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5qmm\" (UniqueName: \"kubernetes.io/projected/d90155e2-48e6-40d6-a832-2d807698ad20-kube-api-access-f5qmm\") pod \"mariadb-client-1\" (UID: \"d90155e2-48e6-40d6-a832-2d807698ad20\") " pod="openstack/mariadb-client-1" Jan 04 13:14:09 crc kubenswrapper[5003]: I0104 13:14:09.988271 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5qmm\" (UniqueName: \"kubernetes.io/projected/d90155e2-48e6-40d6-a832-2d807698ad20-kube-api-access-f5qmm\") pod \"mariadb-client-1\" (UID: \"d90155e2-48e6-40d6-a832-2d807698ad20\") " pod="openstack/mariadb-client-1" Jan 04 13:14:10 crc kubenswrapper[5003]: I0104 13:14:10.080684 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2-default" Jan 04 13:14:10 crc kubenswrapper[5003]: I0104 13:14:10.111669 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Jan 04 13:14:10 crc kubenswrapper[5003]: I0104 13:14:10.687839 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Jan 04 13:14:10 crc kubenswrapper[5003]: W0104 13:14:10.697651 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd90155e2_48e6_40d6_a832_2d807698ad20.slice/crio-8bc570074583fae84684d79d62bf11c2afbfbcb0c0486ae69a5ac19379f3b976 WatchSource:0}: Error finding container 8bc570074583fae84684d79d62bf11c2afbfbcb0c0486ae69a5ac19379f3b976: Status 404 returned error can't find the container with id 8bc570074583fae84684d79d62bf11c2afbfbcb0c0486ae69a5ac19379f3b976 Jan 04 13:14:10 crc kubenswrapper[5003]: I0104 13:14:10.822745 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9a87e68-3b87-4cd9-9552-8076be81093e" path="/var/lib/kubelet/pods/f9a87e68-3b87-4cd9-9552-8076be81093e/volumes" Jan 04 13:14:11 crc kubenswrapper[5003]: I0104 13:14:11.092361 5003 generic.go:334] "Generic (PLEG): container finished" podID="d90155e2-48e6-40d6-a832-2d807698ad20" containerID="0acdd49ba6d84a81f92a39e3dbc4c3a64b401e1c9ec41b4969f38051b102fab2" exitCode=0 Jan 04 13:14:11 crc kubenswrapper[5003]: I0104 13:14:11.092432 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"d90155e2-48e6-40d6-a832-2d807698ad20","Type":"ContainerDied","Data":"0acdd49ba6d84a81f92a39e3dbc4c3a64b401e1c9ec41b4969f38051b102fab2"} Jan 04 13:14:11 crc kubenswrapper[5003]: I0104 13:14:11.092493 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"d90155e2-48e6-40d6-a832-2d807698ad20","Type":"ContainerStarted","Data":"8bc570074583fae84684d79d62bf11c2afbfbcb0c0486ae69a5ac19379f3b976"} Jan 04 13:14:12 crc kubenswrapper[5003]: I0104 13:14:12.553616 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Jan 04 13:14:12 crc kubenswrapper[5003]: I0104 13:14:12.572877 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_d90155e2-48e6-40d6-a832-2d807698ad20/mariadb-client-1/0.log" Jan 04 13:14:12 crc kubenswrapper[5003]: I0104 13:14:12.611930 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Jan 04 13:14:12 crc kubenswrapper[5003]: I0104 13:14:12.621553 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Jan 04 13:14:12 crc kubenswrapper[5003]: I0104 13:14:12.732095 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5qmm\" (UniqueName: \"kubernetes.io/projected/d90155e2-48e6-40d6-a832-2d807698ad20-kube-api-access-f5qmm\") pod \"d90155e2-48e6-40d6-a832-2d807698ad20\" (UID: \"d90155e2-48e6-40d6-a832-2d807698ad20\") " Jan 04 13:14:12 crc kubenswrapper[5003]: I0104 13:14:12.739104 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d90155e2-48e6-40d6-a832-2d807698ad20-kube-api-access-f5qmm" (OuterVolumeSpecName: "kube-api-access-f5qmm") pod "d90155e2-48e6-40d6-a832-2d807698ad20" (UID: "d90155e2-48e6-40d6-a832-2d807698ad20"). InnerVolumeSpecName "kube-api-access-f5qmm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:14:12 crc kubenswrapper[5003]: I0104 13:14:12.818825 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d90155e2-48e6-40d6-a832-2d807698ad20" path="/var/lib/kubelet/pods/d90155e2-48e6-40d6-a832-2d807698ad20/volumes" Jan 04 13:14:12 crc kubenswrapper[5003]: I0104 13:14:12.833985 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5qmm\" (UniqueName: \"kubernetes.io/projected/d90155e2-48e6-40d6-a832-2d807698ad20-kube-api-access-f5qmm\") on node \"crc\" DevicePath \"\"" Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.115450 5003 scope.go:117] "RemoveContainer" containerID="0acdd49ba6d84a81f92a39e3dbc4c3a64b401e1c9ec41b4969f38051b102fab2" Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.115899 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.166526 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Jan 04 13:14:13 crc kubenswrapper[5003]: E0104 13:14:13.166836 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d90155e2-48e6-40d6-a832-2d807698ad20" containerName="mariadb-client-1" Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.166849 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d90155e2-48e6-40d6-a832-2d807698ad20" containerName="mariadb-client-1" Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.166994 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d90155e2-48e6-40d6-a832-2d807698ad20" containerName="mariadb-client-1" Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.167478 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.171116 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bg47z" Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.175587 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.342380 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k97g\" (UniqueName: \"kubernetes.io/projected/a85304db-e6c2-4872-872d-6dbdf14865bc-kube-api-access-8k97g\") pod \"mariadb-client-4-default\" (UID: \"a85304db-e6c2-4872-872d-6dbdf14865bc\") " pod="openstack/mariadb-client-4-default" Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.443863 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k97g\" (UniqueName: \"kubernetes.io/projected/a85304db-e6c2-4872-872d-6dbdf14865bc-kube-api-access-8k97g\") pod \"mariadb-client-4-default\" (UID: \"a85304db-e6c2-4872-872d-6dbdf14865bc\") " pod="openstack/mariadb-client-4-default" Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.464379 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k97g\" (UniqueName: \"kubernetes.io/projected/a85304db-e6c2-4872-872d-6dbdf14865bc-kube-api-access-8k97g\") pod \"mariadb-client-4-default\" (UID: \"a85304db-e6c2-4872-872d-6dbdf14865bc\") " pod="openstack/mariadb-client-4-default" Jan 04 13:14:13 crc kubenswrapper[5003]: I0104 13:14:13.523176 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Jan 04 13:14:14 crc kubenswrapper[5003]: I0104 13:14:14.072622 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Jan 04 13:14:14 crc kubenswrapper[5003]: I0104 13:14:14.128274 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"a85304db-e6c2-4872-872d-6dbdf14865bc","Type":"ContainerStarted","Data":"1b24de8837949cde03add9e15b8c84552ac1ce97ca9994012f1c2ab192ebc4c8"} Jan 04 13:14:15 crc kubenswrapper[5003]: I0104 13:14:15.140614 5003 generic.go:334] "Generic (PLEG): container finished" podID="a85304db-e6c2-4872-872d-6dbdf14865bc" containerID="1fbd067c4507b1827b79a198eb7427d7b347312f6c6a13a7906da3af39434edd" exitCode=0 Jan 04 13:14:15 crc kubenswrapper[5003]: I0104 13:14:15.140670 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"a85304db-e6c2-4872-872d-6dbdf14865bc","Type":"ContainerDied","Data":"1fbd067c4507b1827b79a198eb7427d7b347312f6c6a13a7906da3af39434edd"} Jan 04 13:14:16 crc kubenswrapper[5003]: I0104 13:14:16.549563 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Jan 04 13:14:16 crc kubenswrapper[5003]: I0104 13:14:16.576681 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_a85304db-e6c2-4872-872d-6dbdf14865bc/mariadb-client-4-default/0.log" Jan 04 13:14:16 crc kubenswrapper[5003]: I0104 13:14:16.608503 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Jan 04 13:14:16 crc kubenswrapper[5003]: I0104 13:14:16.615132 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Jan 04 13:14:16 crc kubenswrapper[5003]: I0104 13:14:16.704472 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k97g\" (UniqueName: \"kubernetes.io/projected/a85304db-e6c2-4872-872d-6dbdf14865bc-kube-api-access-8k97g\") pod \"a85304db-e6c2-4872-872d-6dbdf14865bc\" (UID: \"a85304db-e6c2-4872-872d-6dbdf14865bc\") " Jan 04 13:14:16 crc kubenswrapper[5003]: I0104 13:14:16.710304 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a85304db-e6c2-4872-872d-6dbdf14865bc-kube-api-access-8k97g" (OuterVolumeSpecName: "kube-api-access-8k97g") pod "a85304db-e6c2-4872-872d-6dbdf14865bc" (UID: "a85304db-e6c2-4872-872d-6dbdf14865bc"). InnerVolumeSpecName "kube-api-access-8k97g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:14:16 crc kubenswrapper[5003]: I0104 13:14:16.806234 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k97g\" (UniqueName: \"kubernetes.io/projected/a85304db-e6c2-4872-872d-6dbdf14865bc-kube-api-access-8k97g\") on node \"crc\" DevicePath \"\"" Jan 04 13:14:16 crc kubenswrapper[5003]: I0104 13:14:16.819184 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a85304db-e6c2-4872-872d-6dbdf14865bc" path="/var/lib/kubelet/pods/a85304db-e6c2-4872-872d-6dbdf14865bc/volumes" Jan 04 13:14:17 crc kubenswrapper[5003]: I0104 13:14:17.166332 5003 scope.go:117] "RemoveContainer" containerID="1fbd067c4507b1827b79a198eb7427d7b347312f6c6a13a7906da3af39434edd" Jan 04 13:14:17 crc kubenswrapper[5003]: I0104 13:14:17.166440 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-4-default" Jan 04 13:14:20 crc kubenswrapper[5003]: I0104 13:14:20.544993 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Jan 04 13:14:20 crc kubenswrapper[5003]: E0104 13:14:20.546046 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a85304db-e6c2-4872-872d-6dbdf14865bc" containerName="mariadb-client-4-default" Jan 04 13:14:20 crc kubenswrapper[5003]: I0104 13:14:20.546077 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a85304db-e6c2-4872-872d-6dbdf14865bc" containerName="mariadb-client-4-default" Jan 04 13:14:20 crc kubenswrapper[5003]: I0104 13:14:20.546377 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a85304db-e6c2-4872-872d-6dbdf14865bc" containerName="mariadb-client-4-default" Jan 04 13:14:20 crc kubenswrapper[5003]: I0104 13:14:20.547302 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Jan 04 13:14:20 crc kubenswrapper[5003]: I0104 13:14:20.550585 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bg47z" Jan 04 13:14:20 crc kubenswrapper[5003]: I0104 13:14:20.551153 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Jan 04 13:14:20 crc kubenswrapper[5003]: I0104 13:14:20.673693 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6pk6\" (UniqueName: \"kubernetes.io/projected/fd150b3b-ee4f-48f1-a433-82bc932e3d0b-kube-api-access-r6pk6\") pod \"mariadb-client-5-default\" (UID: \"fd150b3b-ee4f-48f1-a433-82bc932e3d0b\") " pod="openstack/mariadb-client-5-default" Jan 04 13:14:20 crc kubenswrapper[5003]: I0104 13:14:20.775555 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6pk6\" (UniqueName: \"kubernetes.io/projected/fd150b3b-ee4f-48f1-a433-82bc932e3d0b-kube-api-access-r6pk6\") pod \"mariadb-client-5-default\" (UID: \"fd150b3b-ee4f-48f1-a433-82bc932e3d0b\") " pod="openstack/mariadb-client-5-default" Jan 04 13:14:20 crc kubenswrapper[5003]: I0104 13:14:20.805238 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6pk6\" (UniqueName: \"kubernetes.io/projected/fd150b3b-ee4f-48f1-a433-82bc932e3d0b-kube-api-access-r6pk6\") pod \"mariadb-client-5-default\" (UID: \"fd150b3b-ee4f-48f1-a433-82bc932e3d0b\") " pod="openstack/mariadb-client-5-default" Jan 04 13:14:20 crc kubenswrapper[5003]: I0104 13:14:20.878552 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Jan 04 13:14:21 crc kubenswrapper[5003]: I0104 13:14:21.466478 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Jan 04 13:14:22 crc kubenswrapper[5003]: I0104 13:14:22.214777 5003 generic.go:334] "Generic (PLEG): container finished" podID="fd150b3b-ee4f-48f1-a433-82bc932e3d0b" containerID="cc02aa7d89f2a7c5cbbfb381eee16b5be37d3467f6a0e97fe6472a7e50e384e4" exitCode=0 Jan 04 13:14:22 crc kubenswrapper[5003]: I0104 13:14:22.214837 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"fd150b3b-ee4f-48f1-a433-82bc932e3d0b","Type":"ContainerDied","Data":"cc02aa7d89f2a7c5cbbfb381eee16b5be37d3467f6a0e97fe6472a7e50e384e4"} Jan 04 13:14:22 crc kubenswrapper[5003]: I0104 13:14:22.214900 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"fd150b3b-ee4f-48f1-a433-82bc932e3d0b","Type":"ContainerStarted","Data":"15a3d7d419d72a90479f1658d5f97c7735b9706d9d8dbb967dafd716d9d81333"} Jan 04 13:14:22 crc kubenswrapper[5003]: I0104 13:14:22.807758 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:14:22 crc kubenswrapper[5003]: E0104 13:14:22.808274 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.744110 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.767698 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_fd150b3b-ee4f-48f1-a433-82bc932e3d0b/mariadb-client-5-default/0.log" Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.800702 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.806211 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.833280 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6pk6\" (UniqueName: \"kubernetes.io/projected/fd150b3b-ee4f-48f1-a433-82bc932e3d0b-kube-api-access-r6pk6\") pod \"fd150b3b-ee4f-48f1-a433-82bc932e3d0b\" (UID: \"fd150b3b-ee4f-48f1-a433-82bc932e3d0b\") " Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.841720 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd150b3b-ee4f-48f1-a433-82bc932e3d0b-kube-api-access-r6pk6" (OuterVolumeSpecName: "kube-api-access-r6pk6") pod "fd150b3b-ee4f-48f1-a433-82bc932e3d0b" (UID: "fd150b3b-ee4f-48f1-a433-82bc932e3d0b"). InnerVolumeSpecName "kube-api-access-r6pk6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.935510 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6pk6\" (UniqueName: \"kubernetes.io/projected/fd150b3b-ee4f-48f1-a433-82bc932e3d0b-kube-api-access-r6pk6\") on node \"crc\" DevicePath \"\"" Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.956755 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Jan 04 13:14:23 crc kubenswrapper[5003]: E0104 13:14:23.957170 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd150b3b-ee4f-48f1-a433-82bc932e3d0b" containerName="mariadb-client-5-default" Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.957208 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd150b3b-ee4f-48f1-a433-82bc932e3d0b" containerName="mariadb-client-5-default" Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.957434 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd150b3b-ee4f-48f1-a433-82bc932e3d0b" containerName="mariadb-client-5-default" Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.958200 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Jan 04 13:14:23 crc kubenswrapper[5003]: I0104 13:14:23.963776 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Jan 04 13:14:24 crc kubenswrapper[5003]: I0104 13:14:24.138873 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tk8hl\" (UniqueName: \"kubernetes.io/projected/bc8b942e-d94b-4eb0-971a-8001b91111ec-kube-api-access-tk8hl\") pod \"mariadb-client-6-default\" (UID: \"bc8b942e-d94b-4eb0-971a-8001b91111ec\") " pod="openstack/mariadb-client-6-default" Jan 04 13:14:24 crc kubenswrapper[5003]: I0104 13:14:24.236500 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15a3d7d419d72a90479f1658d5f97c7735b9706d9d8dbb967dafd716d9d81333" Jan 04 13:14:24 crc kubenswrapper[5003]: I0104 13:14:24.236570 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Jan 04 13:14:24 crc kubenswrapper[5003]: I0104 13:14:24.240508 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tk8hl\" (UniqueName: \"kubernetes.io/projected/bc8b942e-d94b-4eb0-971a-8001b91111ec-kube-api-access-tk8hl\") pod \"mariadb-client-6-default\" (UID: \"bc8b942e-d94b-4eb0-971a-8001b91111ec\") " pod="openstack/mariadb-client-6-default" Jan 04 13:14:24 crc kubenswrapper[5003]: I0104 13:14:24.270638 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tk8hl\" (UniqueName: \"kubernetes.io/projected/bc8b942e-d94b-4eb0-971a-8001b91111ec-kube-api-access-tk8hl\") pod \"mariadb-client-6-default\" (UID: \"bc8b942e-d94b-4eb0-971a-8001b91111ec\") " pod="openstack/mariadb-client-6-default" Jan 04 13:14:24 crc kubenswrapper[5003]: I0104 13:14:24.288054 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Jan 04 13:14:24 crc kubenswrapper[5003]: I0104 13:14:24.639243 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Jan 04 13:14:24 crc kubenswrapper[5003]: W0104 13:14:24.644781 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc8b942e_d94b_4eb0_971a_8001b91111ec.slice/crio-a102be20cfe6b846002b66646cd0d0b77e66586600d2bf560b6ede6357682160 WatchSource:0}: Error finding container a102be20cfe6b846002b66646cd0d0b77e66586600d2bf560b6ede6357682160: Status 404 returned error can't find the container with id a102be20cfe6b846002b66646cd0d0b77e66586600d2bf560b6ede6357682160 Jan 04 13:14:24 crc kubenswrapper[5003]: I0104 13:14:24.818450 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd150b3b-ee4f-48f1-a433-82bc932e3d0b" path="/var/lib/kubelet/pods/fd150b3b-ee4f-48f1-a433-82bc932e3d0b/volumes" Jan 04 13:14:25 crc kubenswrapper[5003]: I0104 13:14:25.246981 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"bc8b942e-d94b-4eb0-971a-8001b91111ec","Type":"ContainerStarted","Data":"86c230fd02a050651095f0b4aa053db4da22570da041b613bd27957496e1f9d9"} Jan 04 13:14:25 crc kubenswrapper[5003]: I0104 13:14:25.247311 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"bc8b942e-d94b-4eb0-971a-8001b91111ec","Type":"ContainerStarted","Data":"a102be20cfe6b846002b66646cd0d0b77e66586600d2bf560b6ede6357682160"} Jan 04 13:14:25 crc kubenswrapper[5003]: I0104 13:14:25.269155 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-6-default" podStartSLOduration=2.269122023 podStartE2EDuration="2.269122023s" podCreationTimestamp="2026-01-04 13:14:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:14:25.262096897 +0000 UTC m=+5180.735126788" watchObservedRunningTime="2026-01-04 13:14:25.269122023 +0000 UTC m=+5180.742151904" Jan 04 13:14:25 crc kubenswrapper[5003]: I0104 13:14:25.375488 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_bc8b942e-d94b-4eb0-971a-8001b91111ec/mariadb-client-6-default/0.log" Jan 04 13:14:26 crc kubenswrapper[5003]: I0104 13:14:26.257438 5003 generic.go:334] "Generic (PLEG): container finished" podID="bc8b942e-d94b-4eb0-971a-8001b91111ec" containerID="86c230fd02a050651095f0b4aa053db4da22570da041b613bd27957496e1f9d9" exitCode=1 Jan 04 13:14:26 crc kubenswrapper[5003]: I0104 13:14:26.257509 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"bc8b942e-d94b-4eb0-971a-8001b91111ec","Type":"ContainerDied","Data":"86c230fd02a050651095f0b4aa053db4da22570da041b613bd27957496e1f9d9"} Jan 04 13:14:27 crc kubenswrapper[5003]: I0104 13:14:27.755242 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Jan 04 13:14:27 crc kubenswrapper[5003]: I0104 13:14:27.797872 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Jan 04 13:14:27 crc kubenswrapper[5003]: I0104 13:14:27.806742 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Jan 04 13:14:27 crc kubenswrapper[5003]: I0104 13:14:27.895429 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk8hl\" (UniqueName: \"kubernetes.io/projected/bc8b942e-d94b-4eb0-971a-8001b91111ec-kube-api-access-tk8hl\") pod \"bc8b942e-d94b-4eb0-971a-8001b91111ec\" (UID: \"bc8b942e-d94b-4eb0-971a-8001b91111ec\") " Jan 04 13:14:27 crc kubenswrapper[5003]: I0104 13:14:27.901116 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc8b942e-d94b-4eb0-971a-8001b91111ec-kube-api-access-tk8hl" (OuterVolumeSpecName: "kube-api-access-tk8hl") pod "bc8b942e-d94b-4eb0-971a-8001b91111ec" (UID: "bc8b942e-d94b-4eb0-971a-8001b91111ec"). InnerVolumeSpecName "kube-api-access-tk8hl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:14:27 crc kubenswrapper[5003]: I0104 13:14:27.997745 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk8hl\" (UniqueName: \"kubernetes.io/projected/bc8b942e-d94b-4eb0-971a-8001b91111ec-kube-api-access-tk8hl\") on node \"crc\" DevicePath \"\"" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.004329 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Jan 04 13:14:28 crc kubenswrapper[5003]: E0104 13:14:28.004659 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc8b942e-d94b-4eb0-971a-8001b91111ec" containerName="mariadb-client-6-default" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.004676 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc8b942e-d94b-4eb0-971a-8001b91111ec" containerName="mariadb-client-6-default" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.004815 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc8b942e-d94b-4eb0-971a-8001b91111ec" containerName="mariadb-client-6-default" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.005599 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.015516 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.098848 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x5b2\" (UniqueName: \"kubernetes.io/projected/763ded9c-afa2-4a76-b96d-c35c75750790-kube-api-access-6x5b2\") pod \"mariadb-client-7-default\" (UID: \"763ded9c-afa2-4a76-b96d-c35c75750790\") " pod="openstack/mariadb-client-7-default" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.199895 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x5b2\" (UniqueName: \"kubernetes.io/projected/763ded9c-afa2-4a76-b96d-c35c75750790-kube-api-access-6x5b2\") pod \"mariadb-client-7-default\" (UID: \"763ded9c-afa2-4a76-b96d-c35c75750790\") " pod="openstack/mariadb-client-7-default" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.228347 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x5b2\" (UniqueName: \"kubernetes.io/projected/763ded9c-afa2-4a76-b96d-c35c75750790-kube-api-access-6x5b2\") pod \"mariadb-client-7-default\" (UID: \"763ded9c-afa2-4a76-b96d-c35c75750790\") " pod="openstack/mariadb-client-7-default" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.276118 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a102be20cfe6b846002b66646cd0d0b77e66586600d2bf560b6ede6357682160" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.276444 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.335990 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.823421 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc8b942e-d94b-4eb0-971a-8001b91111ec" path="/var/lib/kubelet/pods/bc8b942e-d94b-4eb0-971a-8001b91111ec/volumes" Jan 04 13:14:28 crc kubenswrapper[5003]: I0104 13:14:28.877872 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Jan 04 13:14:28 crc kubenswrapper[5003]: W0104 13:14:28.887187 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod763ded9c_afa2_4a76_b96d_c35c75750790.slice/crio-3c5a8d2f5c92247c7cca139e30a52feadae2ee276bb6bd7e751bef9b88a853f5 WatchSource:0}: Error finding container 3c5a8d2f5c92247c7cca139e30a52feadae2ee276bb6bd7e751bef9b88a853f5: Status 404 returned error can't find the container with id 3c5a8d2f5c92247c7cca139e30a52feadae2ee276bb6bd7e751bef9b88a853f5 Jan 04 13:14:29 crc kubenswrapper[5003]: I0104 13:14:29.286279 5003 generic.go:334] "Generic (PLEG): container finished" podID="763ded9c-afa2-4a76-b96d-c35c75750790" containerID="5b9db72729ecb76f63f22fbef8917cbfdf3dc8e31125257d73e74d83ef047a5b" exitCode=0 Jan 04 13:14:29 crc kubenswrapper[5003]: I0104 13:14:29.286358 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"763ded9c-afa2-4a76-b96d-c35c75750790","Type":"ContainerDied","Data":"5b9db72729ecb76f63f22fbef8917cbfdf3dc8e31125257d73e74d83ef047a5b"} Jan 04 13:14:29 crc kubenswrapper[5003]: I0104 13:14:29.286484 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"763ded9c-afa2-4a76-b96d-c35c75750790","Type":"ContainerStarted","Data":"3c5a8d2f5c92247c7cca139e30a52feadae2ee276bb6bd7e751bef9b88a853f5"} Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.674353 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.696791 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_763ded9c-afa2-4a76-b96d-c35c75750790/mariadb-client-7-default/0.log" Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.731459 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.736912 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.739576 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6x5b2\" (UniqueName: \"kubernetes.io/projected/763ded9c-afa2-4a76-b96d-c35c75750790-kube-api-access-6x5b2\") pod \"763ded9c-afa2-4a76-b96d-c35c75750790\" (UID: \"763ded9c-afa2-4a76-b96d-c35c75750790\") " Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.746235 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/763ded9c-afa2-4a76-b96d-c35c75750790-kube-api-access-6x5b2" (OuterVolumeSpecName: "kube-api-access-6x5b2") pod "763ded9c-afa2-4a76-b96d-c35c75750790" (UID: "763ded9c-afa2-4a76-b96d-c35c75750790"). InnerVolumeSpecName "kube-api-access-6x5b2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.819659 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="763ded9c-afa2-4a76-b96d-c35c75750790" path="/var/lib/kubelet/pods/763ded9c-afa2-4a76-b96d-c35c75750790/volumes" Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.841762 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6x5b2\" (UniqueName: \"kubernetes.io/projected/763ded9c-afa2-4a76-b96d-c35c75750790-kube-api-access-6x5b2\") on node \"crc\" DevicePath \"\"" Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.904995 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Jan 04 13:14:30 crc kubenswrapper[5003]: E0104 13:14:30.905598 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="763ded9c-afa2-4a76-b96d-c35c75750790" containerName="mariadb-client-7-default" Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.905633 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="763ded9c-afa2-4a76-b96d-c35c75750790" containerName="mariadb-client-7-default" Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.905913 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="763ded9c-afa2-4a76-b96d-c35c75750790" containerName="mariadb-client-7-default" Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.906853 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Jan 04 13:14:30 crc kubenswrapper[5003]: I0104 13:14:30.916143 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Jan 04 13:14:31 crc kubenswrapper[5003]: I0104 13:14:31.044758 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj8mj\" (UniqueName: \"kubernetes.io/projected/c21d5fa2-210c-4b8c-8aa1-739ab80e5189-kube-api-access-vj8mj\") pod \"mariadb-client-2\" (UID: \"c21d5fa2-210c-4b8c-8aa1-739ab80e5189\") " pod="openstack/mariadb-client-2" Jan 04 13:14:31 crc kubenswrapper[5003]: I0104 13:14:31.146760 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj8mj\" (UniqueName: \"kubernetes.io/projected/c21d5fa2-210c-4b8c-8aa1-739ab80e5189-kube-api-access-vj8mj\") pod \"mariadb-client-2\" (UID: \"c21d5fa2-210c-4b8c-8aa1-739ab80e5189\") " pod="openstack/mariadb-client-2" Jan 04 13:14:31 crc kubenswrapper[5003]: I0104 13:14:31.171369 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj8mj\" (UniqueName: \"kubernetes.io/projected/c21d5fa2-210c-4b8c-8aa1-739ab80e5189-kube-api-access-vj8mj\") pod \"mariadb-client-2\" (UID: \"c21d5fa2-210c-4b8c-8aa1-739ab80e5189\") " pod="openstack/mariadb-client-2" Jan 04 13:14:31 crc kubenswrapper[5003]: I0104 13:14:31.221365 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Jan 04 13:14:31 crc kubenswrapper[5003]: I0104 13:14:31.305378 5003 scope.go:117] "RemoveContainer" containerID="5b9db72729ecb76f63f22fbef8917cbfdf3dc8e31125257d73e74d83ef047a5b" Jan 04 13:14:31 crc kubenswrapper[5003]: I0104 13:14:31.305514 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Jan 04 13:14:31 crc kubenswrapper[5003]: I0104 13:14:31.624215 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Jan 04 13:14:31 crc kubenswrapper[5003]: W0104 13:14:31.631762 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc21d5fa2_210c_4b8c_8aa1_739ab80e5189.slice/crio-e86a9fcfa1eb3e794c8dfb3cd5e4bd9b67df804dac98b9d4b09c42a1374683e3 WatchSource:0}: Error finding container e86a9fcfa1eb3e794c8dfb3cd5e4bd9b67df804dac98b9d4b09c42a1374683e3: Status 404 returned error can't find the container with id e86a9fcfa1eb3e794c8dfb3cd5e4bd9b67df804dac98b9d4b09c42a1374683e3 Jan 04 13:14:32 crc kubenswrapper[5003]: I0104 13:14:32.313960 5003 generic.go:334] "Generic (PLEG): container finished" podID="c21d5fa2-210c-4b8c-8aa1-739ab80e5189" containerID="56e2a7be88045c4d67c8c688eb3089019ac2be40ccc6c9613295aabfcec7cb5a" exitCode=0 Jan 04 13:14:32 crc kubenswrapper[5003]: I0104 13:14:32.314099 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"c21d5fa2-210c-4b8c-8aa1-739ab80e5189","Type":"ContainerDied","Data":"56e2a7be88045c4d67c8c688eb3089019ac2be40ccc6c9613295aabfcec7cb5a"} Jan 04 13:14:32 crc kubenswrapper[5003]: I0104 13:14:32.314189 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"c21d5fa2-210c-4b8c-8aa1-739ab80e5189","Type":"ContainerStarted","Data":"e86a9fcfa1eb3e794c8dfb3cd5e4bd9b67df804dac98b9d4b09c42a1374683e3"} Jan 04 13:14:33 crc kubenswrapper[5003]: I0104 13:14:33.717672 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Jan 04 13:14:33 crc kubenswrapper[5003]: I0104 13:14:33.735769 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_c21d5fa2-210c-4b8c-8aa1-739ab80e5189/mariadb-client-2/0.log" Jan 04 13:14:33 crc kubenswrapper[5003]: I0104 13:14:33.763738 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Jan 04 13:14:33 crc kubenswrapper[5003]: I0104 13:14:33.769383 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Jan 04 13:14:33 crc kubenswrapper[5003]: I0104 13:14:33.793169 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vj8mj\" (UniqueName: \"kubernetes.io/projected/c21d5fa2-210c-4b8c-8aa1-739ab80e5189-kube-api-access-vj8mj\") pod \"c21d5fa2-210c-4b8c-8aa1-739ab80e5189\" (UID: \"c21d5fa2-210c-4b8c-8aa1-739ab80e5189\") " Jan 04 13:14:33 crc kubenswrapper[5003]: I0104 13:14:33.799421 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c21d5fa2-210c-4b8c-8aa1-739ab80e5189-kube-api-access-vj8mj" (OuterVolumeSpecName: "kube-api-access-vj8mj") pod "c21d5fa2-210c-4b8c-8aa1-739ab80e5189" (UID: "c21d5fa2-210c-4b8c-8aa1-739ab80e5189"). InnerVolumeSpecName "kube-api-access-vj8mj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:14:33 crc kubenswrapper[5003]: I0104 13:14:33.894984 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vj8mj\" (UniqueName: \"kubernetes.io/projected/c21d5fa2-210c-4b8c-8aa1-739ab80e5189-kube-api-access-vj8mj\") on node \"crc\" DevicePath \"\"" Jan 04 13:14:34 crc kubenswrapper[5003]: I0104 13:14:34.334988 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e86a9fcfa1eb3e794c8dfb3cd5e4bd9b67df804dac98b9d4b09c42a1374683e3" Jan 04 13:14:34 crc kubenswrapper[5003]: I0104 13:14:34.335130 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Jan 04 13:14:34 crc kubenswrapper[5003]: I0104 13:14:34.811223 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:14:34 crc kubenswrapper[5003]: E0104 13:14:34.811866 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:14:34 crc kubenswrapper[5003]: I0104 13:14:34.819758 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c21d5fa2-210c-4b8c-8aa1-739ab80e5189" path="/var/lib/kubelet/pods/c21d5fa2-210c-4b8c-8aa1-739ab80e5189/volumes" Jan 04 13:14:47 crc kubenswrapper[5003]: I0104 13:14:47.806818 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:14:47 crc kubenswrapper[5003]: E0104 13:14:47.807677 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:14:58 crc kubenswrapper[5003]: I0104 13:14:58.807345 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:14:58 crc kubenswrapper[5003]: E0104 13:14:58.808071 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.156643 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4"] Jan 04 13:15:00 crc kubenswrapper[5003]: E0104 13:15:00.157192 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c21d5fa2-210c-4b8c-8aa1-739ab80e5189" containerName="mariadb-client-2" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.157211 5003 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="c21d5fa2-210c-4b8c-8aa1-739ab80e5189" containerName="mariadb-client-2" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.157449 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="c21d5fa2-210c-4b8c-8aa1-739ab80e5189" containerName="mariadb-client-2" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.158444 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.160501 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.162569 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.171984 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4"] Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.336699 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-config-volume\") pod \"collect-profiles-29458875-44mz4\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.336877 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tclq\" (UniqueName: \"kubernetes.io/projected/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-kube-api-access-2tclq\") pod \"collect-profiles-29458875-44mz4\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.336920 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-secret-volume\") pod \"collect-profiles-29458875-44mz4\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.438906 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-config-volume\") pod \"collect-profiles-29458875-44mz4\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.439041 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tclq\" (UniqueName: \"kubernetes.io/projected/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-kube-api-access-2tclq\") pod \"collect-profiles-29458875-44mz4\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.439074 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-secret-volume\") pod 
\"collect-profiles-29458875-44mz4\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.439988 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-config-volume\") pod \"collect-profiles-29458875-44mz4\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.445395 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-secret-volume\") pod \"collect-profiles-29458875-44mz4\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.458287 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tclq\" (UniqueName: \"kubernetes.io/projected/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-kube-api-access-2tclq\") pod \"collect-profiles-29458875-44mz4\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.527544 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:00 crc kubenswrapper[5003]: I0104 13:15:00.965116 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4"] Jan 04 13:15:01 crc kubenswrapper[5003]: I0104 13:15:01.573744 5003 generic.go:334] "Generic (PLEG): container finished" podID="4ca9f208-96ba-452c-a2c2-bcb2aae2fe22" containerID="e75fcd534877f5579602be27cd188defe41b50b9c0e9f25b5fe16d060903f86d" exitCode=0 Jan 04 13:15:01 crc kubenswrapper[5003]: I0104 13:15:01.573799 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" event={"ID":"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22","Type":"ContainerDied","Data":"e75fcd534877f5579602be27cd188defe41b50b9c0e9f25b5fe16d060903f86d"} Jan 04 13:15:01 crc kubenswrapper[5003]: I0104 13:15:01.573832 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" event={"ID":"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22","Type":"ContainerStarted","Data":"0b9de02a21d36de9b67d758324c6a099ce7cf72ba78d0b10d745bc87a4eab8ac"} Jan 04 13:15:02 crc kubenswrapper[5003]: I0104 13:15:02.936974 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.080084 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-config-volume\") pod \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.080274 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-secret-volume\") pod \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.080521 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tclq\" (UniqueName: \"kubernetes.io/projected/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-kube-api-access-2tclq\") pod \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\" (UID: \"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22\") " Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.081007 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-config-volume" (OuterVolumeSpecName: "config-volume") pod "4ca9f208-96ba-452c-a2c2-bcb2aae2fe22" (UID: "4ca9f208-96ba-452c-a2c2-bcb2aae2fe22"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.086588 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4ca9f208-96ba-452c-a2c2-bcb2aae2fe22" (UID: "4ca9f208-96ba-452c-a2c2-bcb2aae2fe22"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.087182 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-kube-api-access-2tclq" (OuterVolumeSpecName: "kube-api-access-2tclq") pod "4ca9f208-96ba-452c-a2c2-bcb2aae2fe22" (UID: "4ca9f208-96ba-452c-a2c2-bcb2aae2fe22"). InnerVolumeSpecName "kube-api-access-2tclq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.182984 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.183049 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tclq\" (UniqueName: \"kubernetes.io/projected/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-kube-api-access-2tclq\") on node \"crc\" DevicePath \"\"" Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.183065 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ca9f208-96ba-452c-a2c2-bcb2aae2fe22-config-volume\") on node \"crc\" DevicePath \"\"" Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.592561 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" event={"ID":"4ca9f208-96ba-452c-a2c2-bcb2aae2fe22","Type":"ContainerDied","Data":"0b9de02a21d36de9b67d758324c6a099ce7cf72ba78d0b10d745bc87a4eab8ac"} Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.592864 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b9de02a21d36de9b67d758324c6a099ce7cf72ba78d0b10d745bc87a4eab8ac" Jan 04 13:15:03 crc kubenswrapper[5003]: I0104 13:15:03.592663 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458875-44mz4" Jan 04 13:15:04 crc kubenswrapper[5003]: I0104 13:15:04.027722 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"] Jan 04 13:15:04 crc kubenswrapper[5003]: I0104 13:15:04.033167 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458830-x5wfl"] Jan 04 13:15:04 crc kubenswrapper[5003]: I0104 13:15:04.822531 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e311c4b-0ab7-4985-bd09-7da18cd2b17c" path="/var/lib/kubelet/pods/6e311c4b-0ab7-4985-bd09-7da18cd2b17c/volumes" Jan 04 13:15:11 crc kubenswrapper[5003]: I0104 13:15:11.806703 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:15:11 crc kubenswrapper[5003]: E0104 13:15:11.807441 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:15:14 crc kubenswrapper[5003]: I0104 13:15:14.492359 5003 scope.go:117] "RemoveContainer" containerID="a453779df5529599191a566b7afd4d90c151af89bc4daa9be2cc1251b7fec7c7" Jan 04 13:15:23 crc kubenswrapper[5003]: I0104 13:15:23.807458 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:15:23 crc kubenswrapper[5003]: E0104 13:15:23.808324 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:15:34 crc kubenswrapper[5003]: I0104 13:15:34.818059 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:15:34 crc kubenswrapper[5003]: E0104 13:15:34.819420 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:15:49 crc kubenswrapper[5003]: I0104 13:15:49.807118 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:15:49 crc kubenswrapper[5003]: E0104 13:15:49.807903 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:16:02 crc kubenswrapper[5003]: I0104 13:16:02.807536 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:16:02 crc kubenswrapper[5003]: E0104 13:16:02.809413 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:16:14 crc kubenswrapper[5003]: I0104 13:16:14.592491 5003 scope.go:117] "RemoveContainer" containerID="40426b391ac5be036555cca8d9048604a2d05c6c3f5b947e18b6d660cb35f26c" Jan 04 13:16:17 crc kubenswrapper[5003]: I0104 13:16:17.806746 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:16:18 crc kubenswrapper[5003]: I0104 13:16:18.362599 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"97829298803abd5773d5e3270e0265b933495a48a989f0b5b421d823db2a3293"} Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.414963 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-n9vp2"] Jan 04 13:16:58 crc kubenswrapper[5003]: E0104 13:16:58.416001 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ca9f208-96ba-452c-a2c2-bcb2aae2fe22" containerName="collect-profiles" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.416037 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ca9f208-96ba-452c-a2c2-bcb2aae2fe22" containerName="collect-profiles" Jan 04 13:16:58 crc 
kubenswrapper[5003]: I0104 13:16:58.416238 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ca9f208-96ba-452c-a2c2-bcb2aae2fe22" containerName="collect-profiles" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.417570 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.430441 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbkqx\" (UniqueName: \"kubernetes.io/projected/e6237748-e508-4a8d-9c27-67728f6694b4-kube-api-access-cbkqx\") pod \"community-operators-n9vp2\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.430554 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-utilities\") pod \"community-operators-n9vp2\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.430837 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-catalog-content\") pod \"community-operators-n9vp2\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.448660 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n9vp2"] Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.532506 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-catalog-content\") pod \"community-operators-n9vp2\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.532588 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbkqx\" (UniqueName: \"kubernetes.io/projected/e6237748-e508-4a8d-9c27-67728f6694b4-kube-api-access-cbkqx\") pod \"community-operators-n9vp2\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.532643 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-utilities\") pod \"community-operators-n9vp2\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.533356 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-catalog-content\") pod \"community-operators-n9vp2\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.533773 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-utilities\") pod \"community-operators-n9vp2\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.558891 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbkqx\" (UniqueName: \"kubernetes.io/projected/e6237748-e508-4a8d-9c27-67728f6694b4-kube-api-access-cbkqx\") pod \"community-operators-n9vp2\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:16:58 crc kubenswrapper[5003]: I0104 13:16:58.752227 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:16:59 crc kubenswrapper[5003]: I0104 13:16:59.246150 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-n9vp2"] Jan 04 13:16:59 crc kubenswrapper[5003]: I0104 13:16:59.870558 5003 generic.go:334] "Generic (PLEG): container finished" podID="e6237748-e508-4a8d-9c27-67728f6694b4" containerID="d715813b063950e1009e8e9ac3d7688fc4fbeb02b997249594579478eb98bae4" exitCode=0 Jan 04 13:16:59 crc kubenswrapper[5003]: I0104 13:16:59.870631 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vp2" event={"ID":"e6237748-e508-4a8d-9c27-67728f6694b4","Type":"ContainerDied","Data":"d715813b063950e1009e8e9ac3d7688fc4fbeb02b997249594579478eb98bae4"} Jan 04 13:16:59 crc kubenswrapper[5003]: I0104 13:16:59.870935 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vp2" event={"ID":"e6237748-e508-4a8d-9c27-67728f6694b4","Type":"ContainerStarted","Data":"71ae4754ebba4ec7d681605bb75e955af581593c4c2dc6f976830f8da81c3e7b"} Jan 04 13:16:59 crc kubenswrapper[5003]: I0104 13:16:59.873949 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 13:17:00 crc kubenswrapper[5003]: I0104 13:17:00.883742 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vp2" event={"ID":"e6237748-e508-4a8d-9c27-67728f6694b4","Type":"ContainerStarted","Data":"c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a"} Jan 04 13:17:01 crc kubenswrapper[5003]: I0104 13:17:01.898301 5003 generic.go:334] "Generic (PLEG): container finished" podID="e6237748-e508-4a8d-9c27-67728f6694b4" containerID="c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a" exitCode=0 Jan 04 13:17:01 crc kubenswrapper[5003]: I0104 13:17:01.898344 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vp2" event={"ID":"e6237748-e508-4a8d-9c27-67728f6694b4","Type":"ContainerDied","Data":"c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a"} Jan 04 13:17:02 crc kubenswrapper[5003]: I0104 13:17:02.912916 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vp2" event={"ID":"e6237748-e508-4a8d-9c27-67728f6694b4","Type":"ContainerStarted","Data":"7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536"} Jan 04 13:17:02 crc kubenswrapper[5003]: I0104 13:17:02.948334 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-n9vp2" podStartSLOduration=2.450707069 podStartE2EDuration="4.948301269s" 
podCreationTimestamp="2026-01-04 13:16:58 +0000 UTC" firstStartedPulling="2026-01-04 13:16:59.873535525 +0000 UTC m=+5335.346565406" lastFinishedPulling="2026-01-04 13:17:02.371129735 +0000 UTC m=+5337.844159606" observedRunningTime="2026-01-04 13:17:02.944700833 +0000 UTC m=+5338.417730704" watchObservedRunningTime="2026-01-04 13:17:02.948301269 +0000 UTC m=+5338.421331140" Jan 04 13:17:08 crc kubenswrapper[5003]: I0104 13:17:08.752842 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:17:08 crc kubenswrapper[5003]: I0104 13:17:08.753804 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:17:08 crc kubenswrapper[5003]: I0104 13:17:08.818557 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:17:09 crc kubenswrapper[5003]: I0104 13:17:09.048643 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:17:09 crc kubenswrapper[5003]: I0104 13:17:09.104383 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n9vp2"] Jan 04 13:17:11 crc kubenswrapper[5003]: I0104 13:17:10.997802 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-n9vp2" podUID="e6237748-e508-4a8d-9c27-67728f6694b4" containerName="registry-server" containerID="cri-o://7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536" gracePeriod=2 Jan 04 13:17:11 crc kubenswrapper[5003]: I0104 13:17:11.575736 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:17:11 crc kubenswrapper[5003]: I0104 13:17:11.661169 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-catalog-content\") pod \"e6237748-e508-4a8d-9c27-67728f6694b4\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " Jan 04 13:17:11 crc kubenswrapper[5003]: I0104 13:17:11.661428 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-utilities\") pod \"e6237748-e508-4a8d-9c27-67728f6694b4\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " Jan 04 13:17:11 crc kubenswrapper[5003]: I0104 13:17:11.661546 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbkqx\" (UniqueName: \"kubernetes.io/projected/e6237748-e508-4a8d-9c27-67728f6694b4-kube-api-access-cbkqx\") pod \"e6237748-e508-4a8d-9c27-67728f6694b4\" (UID: \"e6237748-e508-4a8d-9c27-67728f6694b4\") " Jan 04 13:17:11 crc kubenswrapper[5003]: I0104 13:17:11.664269 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-utilities" (OuterVolumeSpecName: "utilities") pod "e6237748-e508-4a8d-9c27-67728f6694b4" (UID: "e6237748-e508-4a8d-9c27-67728f6694b4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:17:11 crc kubenswrapper[5003]: I0104 13:17:11.680441 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6237748-e508-4a8d-9c27-67728f6694b4-kube-api-access-cbkqx" (OuterVolumeSpecName: "kube-api-access-cbkqx") pod "e6237748-e508-4a8d-9c27-67728f6694b4" (UID: "e6237748-e508-4a8d-9c27-67728f6694b4"). InnerVolumeSpecName "kube-api-access-cbkqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:17:11 crc kubenswrapper[5003]: I0104 13:17:11.765332 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 13:17:11 crc kubenswrapper[5003]: I0104 13:17:11.765382 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbkqx\" (UniqueName: \"kubernetes.io/projected/e6237748-e508-4a8d-9c27-67728f6694b4-kube-api-access-cbkqx\") on node \"crc\" DevicePath \"\"" Jan 04 13:17:11 crc kubenswrapper[5003]: I0104 13:17:11.765389 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e6237748-e508-4a8d-9c27-67728f6694b4" (UID: "e6237748-e508-4a8d-9c27-67728f6694b4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:17:11 crc kubenswrapper[5003]: I0104 13:17:11.867088 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6237748-e508-4a8d-9c27-67728f6694b4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.017835 5003 generic.go:334] "Generic (PLEG): container finished" podID="e6237748-e508-4a8d-9c27-67728f6694b4" containerID="7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536" exitCode=0 Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.017897 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vp2" event={"ID":"e6237748-e508-4a8d-9c27-67728f6694b4","Type":"ContainerDied","Data":"7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536"} Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.017933 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-n9vp2" event={"ID":"e6237748-e508-4a8d-9c27-67728f6694b4","Type":"ContainerDied","Data":"71ae4754ebba4ec7d681605bb75e955af581593c4c2dc6f976830f8da81c3e7b"} Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.017954 5003 scope.go:117] "RemoveContainer" containerID="7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536" Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.018107 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-n9vp2" Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.058812 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-n9vp2"] Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.063569 5003 scope.go:117] "RemoveContainer" containerID="c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a" Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.072521 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-n9vp2"] Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.091587 5003 scope.go:117] "RemoveContainer" containerID="d715813b063950e1009e8e9ac3d7688fc4fbeb02b997249594579478eb98bae4" Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.148509 5003 scope.go:117] "RemoveContainer" containerID="7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536" Jan 04 13:17:12 crc kubenswrapper[5003]: E0104 13:17:12.149395 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536\": container with ID starting with 7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536 not found: ID does not exist" containerID="7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536" Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.149598 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536"} err="failed to get container status \"7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536\": rpc error: code = NotFound desc = could not find container \"7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536\": container with ID starting with 7c4b7a45e8579bfccb3ffa77256ddb6324194e4f147cfa5c6c1979138b889536 not found: ID does not exist" Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.149647 5003 scope.go:117] "RemoveContainer" containerID="c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a" Jan 04 13:17:12 crc kubenswrapper[5003]: E0104 13:17:12.150286 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a\": container with ID starting with c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a not found: ID does not exist" containerID="c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a" Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.150529 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a"} err="failed to get container status \"c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a\": rpc error: code = NotFound desc = could not find container \"c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a\": container with ID starting with c6dc8aa5f6c5e586b4974d79f8e2c6bb46f2e3f805e83c46aba58bd33627920a not found: ID does not exist" Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.150708 5003 scope.go:117] "RemoveContainer" containerID="d715813b063950e1009e8e9ac3d7688fc4fbeb02b997249594579478eb98bae4" Jan 04 13:17:12 crc kubenswrapper[5003]: E0104 13:17:12.151308 5003 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d715813b063950e1009e8e9ac3d7688fc4fbeb02b997249594579478eb98bae4\": container with ID starting with d715813b063950e1009e8e9ac3d7688fc4fbeb02b997249594579478eb98bae4 not found: ID does not exist" containerID="d715813b063950e1009e8e9ac3d7688fc4fbeb02b997249594579478eb98bae4" Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.151487 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d715813b063950e1009e8e9ac3d7688fc4fbeb02b997249594579478eb98bae4"} err="failed to get container status \"d715813b063950e1009e8e9ac3d7688fc4fbeb02b997249594579478eb98bae4\": rpc error: code = NotFound desc = could not find container \"d715813b063950e1009e8e9ac3d7688fc4fbeb02b997249594579478eb98bae4\": container with ID starting with d715813b063950e1009e8e9ac3d7688fc4fbeb02b997249594579478eb98bae4 not found: ID does not exist" Jan 04 13:17:12 crc kubenswrapper[5003]: I0104 13:17:12.824960 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6237748-e508-4a8d-9c27-67728f6694b4" path="/var/lib/kubelet/pods/e6237748-e508-4a8d-9c27-67728f6694b4/volumes" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.110302 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"] Jan 04 13:18:14 crc kubenswrapper[5003]: E0104 13:18:14.112992 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6237748-e508-4a8d-9c27-67728f6694b4" containerName="registry-server" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.113101 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6237748-e508-4a8d-9c27-67728f6694b4" containerName="registry-server" Jan 04 13:18:14 crc kubenswrapper[5003]: E0104 13:18:14.113196 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6237748-e508-4a8d-9c27-67728f6694b4" containerName="extract-utilities" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.113232 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6237748-e508-4a8d-9c27-67728f6694b4" containerName="extract-utilities" Jan 04 13:18:14 crc kubenswrapper[5003]: E0104 13:18:14.113277 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6237748-e508-4a8d-9c27-67728f6694b4" containerName="extract-content" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.113307 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6237748-e508-4a8d-9c27-67728f6694b4" containerName="extract-content" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.113938 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6237748-e508-4a8d-9c27-67728f6694b4" containerName="registry-server" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.115814 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.120802 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-bg47z" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.137968 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.195867 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b552c72e-2c2f-4313-ac12-f538aad78dec\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b552c72e-2c2f-4313-ac12-f538aad78dec\") pod \"mariadb-copy-data\" (UID: \"c3e0f94e-4084-44d2-84ea-9da47439225d\") " pod="openstack/mariadb-copy-data" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.196348 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xrdl\" (UniqueName: \"kubernetes.io/projected/c3e0f94e-4084-44d2-84ea-9da47439225d-kube-api-access-6xrdl\") pod \"mariadb-copy-data\" (UID: \"c3e0f94e-4084-44d2-84ea-9da47439225d\") " pod="openstack/mariadb-copy-data" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.299182 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b552c72e-2c2f-4313-ac12-f538aad78dec\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b552c72e-2c2f-4313-ac12-f538aad78dec\") pod \"mariadb-copy-data\" (UID: \"c3e0f94e-4084-44d2-84ea-9da47439225d\") " pod="openstack/mariadb-copy-data" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.299405 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xrdl\" (UniqueName: \"kubernetes.io/projected/c3e0f94e-4084-44d2-84ea-9da47439225d-kube-api-access-6xrdl\") pod \"mariadb-copy-data\" (UID: \"c3e0f94e-4084-44d2-84ea-9da47439225d\") " pod="openstack/mariadb-copy-data" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.305316 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.305389 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b552c72e-2c2f-4313-ac12-f538aad78dec\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b552c72e-2c2f-4313-ac12-f538aad78dec\") pod \"mariadb-copy-data\" (UID: \"c3e0f94e-4084-44d2-84ea-9da47439225d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/72dbdfcad071bf8778053ed2a0bf63d34c29f6210d4ae3b3855bc1c80416d326/globalmount\"" pod="openstack/mariadb-copy-data" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.327868 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xrdl\" (UniqueName: \"kubernetes.io/projected/c3e0f94e-4084-44d2-84ea-9da47439225d-kube-api-access-6xrdl\") pod \"mariadb-copy-data\" (UID: \"c3e0f94e-4084-44d2-84ea-9da47439225d\") " pod="openstack/mariadb-copy-data" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.370274 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b552c72e-2c2f-4313-ac12-f538aad78dec\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b552c72e-2c2f-4313-ac12-f538aad78dec\") pod \"mariadb-copy-data\" (UID: \"c3e0f94e-4084-44d2-84ea-9da47439225d\") " pod="openstack/mariadb-copy-data" Jan 04 13:18:14 crc kubenswrapper[5003]: I0104 13:18:14.468738 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data" Jan 04 13:18:15 crc kubenswrapper[5003]: I0104 13:18:15.098634 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"] Jan 04 13:18:15 crc kubenswrapper[5003]: I0104 13:18:15.761781 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"c3e0f94e-4084-44d2-84ea-9da47439225d","Type":"ContainerStarted","Data":"0fb76e0d2acfb7461b8fb79466ec3d3d4ffa6cc55474b0197cf4793ff8ecfcda"} Jan 04 13:18:15 crc kubenswrapper[5003]: I0104 13:18:15.762497 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"c3e0f94e-4084-44d2-84ea-9da47439225d","Type":"ContainerStarted","Data":"db70697965a65325e61dab054665d01662bfa9d9114026fd31b58bd4d5de17f9"} Jan 04 13:18:15 crc kubenswrapper[5003]: I0104 13:18:15.784634 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=2.7846024160000002 podStartE2EDuration="2.784602416s" podCreationTimestamp="2026-01-04 13:18:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:18:15.778599777 +0000 UTC m=+5411.251629648" watchObservedRunningTime="2026-01-04 13:18:15.784602416 +0000 UTC m=+5411.257632297" Jan 04 13:18:19 crc kubenswrapper[5003]: I0104 13:18:19.419804 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 04 13:18:19 crc kubenswrapper[5003]: I0104 13:18:19.422112 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 04 13:18:19 crc kubenswrapper[5003]: I0104 13:18:19.435961 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 04 13:18:19 crc kubenswrapper[5003]: I0104 13:18:19.515161 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6cpn\" (UniqueName: \"kubernetes.io/projected/817acffd-3791-4405-9817-bbfed5d7d95c-kube-api-access-f6cpn\") pod \"mariadb-client\" (UID: \"817acffd-3791-4405-9817-bbfed5d7d95c\") " pod="openstack/mariadb-client" Jan 04 13:18:19 crc kubenswrapper[5003]: I0104 13:18:19.617571 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6cpn\" (UniqueName: \"kubernetes.io/projected/817acffd-3791-4405-9817-bbfed5d7d95c-kube-api-access-f6cpn\") pod \"mariadb-client\" (UID: \"817acffd-3791-4405-9817-bbfed5d7d95c\") " pod="openstack/mariadb-client" Jan 04 13:18:19 crc kubenswrapper[5003]: I0104 13:18:19.656819 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6cpn\" (UniqueName: \"kubernetes.io/projected/817acffd-3791-4405-9817-bbfed5d7d95c-kube-api-access-f6cpn\") pod \"mariadb-client\" (UID: \"817acffd-3791-4405-9817-bbfed5d7d95c\") " pod="openstack/mariadb-client" Jan 04 13:18:19 crc kubenswrapper[5003]: I0104 13:18:19.757634 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 04 13:18:20 crc kubenswrapper[5003]: I0104 13:18:20.082950 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 04 13:18:20 crc kubenswrapper[5003]: I0104 13:18:20.826635 5003 generic.go:334] "Generic (PLEG): container finished" podID="817acffd-3791-4405-9817-bbfed5d7d95c" containerID="a1b204b973d54228805e8e5981e9cee4e3689a07f22e3d0af1c1e389fa06fd1a" exitCode=0 Jan 04 13:18:20 crc kubenswrapper[5003]: I0104 13:18:20.826713 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"817acffd-3791-4405-9817-bbfed5d7d95c","Type":"ContainerDied","Data":"a1b204b973d54228805e8e5981e9cee4e3689a07f22e3d0af1c1e389fa06fd1a"} Jan 04 13:18:20 crc kubenswrapper[5003]: I0104 13:18:20.826762 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"817acffd-3791-4405-9817-bbfed5d7d95c","Type":"ContainerStarted","Data":"fbe2820c4d18598331ba0b35cf585b4d31b6a8c83726097302e5020aa45cd8e8"} Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.249309 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.265318 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6cpn\" (UniqueName: \"kubernetes.io/projected/817acffd-3791-4405-9817-bbfed5d7d95c-kube-api-access-f6cpn\") pod \"817acffd-3791-4405-9817-bbfed5d7d95c\" (UID: \"817acffd-3791-4405-9817-bbfed5d7d95c\") " Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.273664 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_817acffd-3791-4405-9817-bbfed5d7d95c/mariadb-client/0.log" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.278384 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/817acffd-3791-4405-9817-bbfed5d7d95c-kube-api-access-f6cpn" (OuterVolumeSpecName: "kube-api-access-f6cpn") pod "817acffd-3791-4405-9817-bbfed5d7d95c" (UID: "817acffd-3791-4405-9817-bbfed5d7d95c"). InnerVolumeSpecName "kube-api-access-f6cpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.303907 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.309463 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.367389 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6cpn\" (UniqueName: \"kubernetes.io/projected/817acffd-3791-4405-9817-bbfed5d7d95c-kube-api-access-f6cpn\") on node \"crc\" DevicePath \"\"" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.481337 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"] Jan 04 13:18:22 crc kubenswrapper[5003]: E0104 13:18:22.482059 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="817acffd-3791-4405-9817-bbfed5d7d95c" containerName="mariadb-client" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.482155 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="817acffd-3791-4405-9817-bbfed5d7d95c" containerName="mariadb-client" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.482419 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="817acffd-3791-4405-9817-bbfed5d7d95c" containerName="mariadb-client" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.483125 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.485502 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.574172 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxhzm\" (UniqueName: \"kubernetes.io/projected/e17849c8-6d64-4cab-872d-c5d921ec161b-kube-api-access-pxhzm\") pod \"mariadb-client\" (UID: \"e17849c8-6d64-4cab-872d-c5d921ec161b\") " pod="openstack/mariadb-client" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.676081 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxhzm\" (UniqueName: \"kubernetes.io/projected/e17849c8-6d64-4cab-872d-c5d921ec161b-kube-api-access-pxhzm\") pod \"mariadb-client\" (UID: \"e17849c8-6d64-4cab-872d-c5d921ec161b\") " pod="openstack/mariadb-client" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.702218 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxhzm\" (UniqueName: \"kubernetes.io/projected/e17849c8-6d64-4cab-872d-c5d921ec161b-kube-api-access-pxhzm\") pod \"mariadb-client\" (UID: \"e17849c8-6d64-4cab-872d-c5d921ec161b\") " pod="openstack/mariadb-client" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.818338 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="817acffd-3791-4405-9817-bbfed5d7d95c" path="/var/lib/kubelet/pods/817acffd-3791-4405-9817-bbfed5d7d95c/volumes" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.853837 5003 scope.go:117] "RemoveContainer" containerID="a1b204b973d54228805e8e5981e9cee4e3689a07f22e3d0af1c1e389fa06fd1a" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.853865 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 04 13:18:22 crc kubenswrapper[5003]: I0104 13:18:22.865461 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client" Jan 04 13:18:23 crc kubenswrapper[5003]: I0104 13:18:23.197653 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"] Jan 04 13:18:23 crc kubenswrapper[5003]: I0104 13:18:23.871588 5003 generic.go:334] "Generic (PLEG): container finished" podID="e17849c8-6d64-4cab-872d-c5d921ec161b" containerID="b6b45c7d3eabea0e3e54c917ab563b6a20e448a2adefddc404e3f39257014458" exitCode=0 Jan 04 13:18:23 crc kubenswrapper[5003]: I0104 13:18:23.871728 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"e17849c8-6d64-4cab-872d-c5d921ec161b","Type":"ContainerDied","Data":"b6b45c7d3eabea0e3e54c917ab563b6a20e448a2adefddc404e3f39257014458"} Jan 04 13:18:23 crc kubenswrapper[5003]: I0104 13:18:23.872513 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"e17849c8-6d64-4cab-872d-c5d921ec161b","Type":"ContainerStarted","Data":"c99bb5bc2fbd7c25dad0c18d30c83e21ddc4d12719fab2e7073cb36daad1fedb"} Jan 04 13:18:25 crc kubenswrapper[5003]: I0104 13:18:25.282260 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 04 13:18:25 crc kubenswrapper[5003]: I0104 13:18:25.305002 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_e17849c8-6d64-4cab-872d-c5d921ec161b/mariadb-client/0.log" Jan 04 13:18:25 crc kubenswrapper[5003]: I0104 13:18:25.349303 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"] Jan 04 13:18:25 crc kubenswrapper[5003]: I0104 13:18:25.356257 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"] Jan 04 13:18:25 crc kubenswrapper[5003]: I0104 13:18:25.427259 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxhzm\" (UniqueName: \"kubernetes.io/projected/e17849c8-6d64-4cab-872d-c5d921ec161b-kube-api-access-pxhzm\") pod \"e17849c8-6d64-4cab-872d-c5d921ec161b\" (UID: \"e17849c8-6d64-4cab-872d-c5d921ec161b\") " Jan 04 13:18:25 crc kubenswrapper[5003]: I0104 13:18:25.434731 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e17849c8-6d64-4cab-872d-c5d921ec161b-kube-api-access-pxhzm" (OuterVolumeSpecName: "kube-api-access-pxhzm") pod "e17849c8-6d64-4cab-872d-c5d921ec161b" (UID: "e17849c8-6d64-4cab-872d-c5d921ec161b"). InnerVolumeSpecName "kube-api-access-pxhzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:18:25 crc kubenswrapper[5003]: I0104 13:18:25.530273 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxhzm\" (UniqueName: \"kubernetes.io/projected/e17849c8-6d64-4cab-872d-c5d921ec161b-kube-api-access-pxhzm\") on node \"crc\" DevicePath \"\"" Jan 04 13:18:25 crc kubenswrapper[5003]: I0104 13:18:25.905840 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c99bb5bc2fbd7c25dad0c18d30c83e21ddc4d12719fab2e7073cb36daad1fedb" Jan 04 13:18:25 crc kubenswrapper[5003]: I0104 13:18:25.905957 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client" Jan 04 13:18:26 crc kubenswrapper[5003]: I0104 13:18:26.825095 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e17849c8-6d64-4cab-872d-c5d921ec161b" path="/var/lib/kubelet/pods/e17849c8-6d64-4cab-872d-c5d921ec161b/volumes" Jan 04 13:18:39 crc kubenswrapper[5003]: I0104 13:18:39.418316 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:18:39 crc kubenswrapper[5003]: I0104 13:18:39.418733 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.520475 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g26d9"] Jan 04 13:18:58 crc kubenswrapper[5003]: E0104 13:18:58.528890 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e17849c8-6d64-4cab-872d-c5d921ec161b" containerName="mariadb-client" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.528943 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="e17849c8-6d64-4cab-872d-c5d921ec161b" containerName="mariadb-client" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.529437 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="e17849c8-6d64-4cab-872d-c5d921ec161b" containerName="mariadb-client" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.531575 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.544620 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g26d9"] Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.573260 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-catalog-content\") pod \"certified-operators-g26d9\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") " pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.573312 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-utilities\") pod \"certified-operators-g26d9\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") " pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.573380 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx9pp\" (UniqueName: \"kubernetes.io/projected/d729952a-6fc6-43dc-96b7-9a387cf92217-kube-api-access-jx9pp\") pod \"certified-operators-g26d9\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") " pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.675452 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx9pp\" (UniqueName: \"kubernetes.io/projected/d729952a-6fc6-43dc-96b7-9a387cf92217-kube-api-access-jx9pp\") pod \"certified-operators-g26d9\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") " pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.675567 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-catalog-content\") pod \"certified-operators-g26d9\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") " pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.675585 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-utilities\") pod \"certified-operators-g26d9\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") " pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.676126 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-utilities\") pod \"certified-operators-g26d9\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") " pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.676368 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-catalog-content\") pod \"certified-operators-g26d9\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") " pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.700409 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jx9pp\" (UniqueName: \"kubernetes.io/projected/d729952a-6fc6-43dc-96b7-9a387cf92217-kube-api-access-jx9pp\") pod \"certified-operators-g26d9\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") " pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:18:58 crc kubenswrapper[5003]: I0104 13:18:58.874790 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:18:59 crc kubenswrapper[5003]: I0104 13:18:59.399712 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g26d9"] Jan 04 13:18:59 crc kubenswrapper[5003]: I0104 13:18:59.597576 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g26d9" event={"ID":"d729952a-6fc6-43dc-96b7-9a387cf92217","Type":"ContainerStarted","Data":"7530188867b264019789cb95e71b0617ac88e5bc91950354c167bfa6da99c22d"} Jan 04 13:18:59 crc kubenswrapper[5003]: I0104 13:18:59.598158 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g26d9" event={"ID":"d729952a-6fc6-43dc-96b7-9a387cf92217","Type":"ContainerStarted","Data":"12714cbe38dd392d48d5fba1251d6b6d1e1bcd1bb8a16ebf0b10cc90457e6433"} Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.277074 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.280231 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.285741 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-jp2cm" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.289646 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.291968 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.292103 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.304041 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.314758 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.325632 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff4f026-1e2a-44a7-848a-ccd5566ece95-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.325849 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ff4f026-1e2a-44a7-848a-ccd5566ece95-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.325940 5003 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h84nf\" (UniqueName: \"kubernetes.io/projected/0ff4f026-1e2a-44a7-848a-ccd5566ece95-kube-api-access-h84nf\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.326107 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ff4f026-1e2a-44a7-848a-ccd5566ece95-config\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.326157 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ff4f026-1e2a-44a7-848a-ccd5566ece95-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.326231 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff4f026-1e2a-44a7-848a-ccd5566ece95-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.326298 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ff4f026-1e2a-44a7-848a-ccd5566ece95-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.326335 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9baa6ba5-7077-4488-814e-7bbb53e5457b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9baa6ba5-7077-4488-814e-7bbb53e5457b\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.335629 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"] Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.341309 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.364307 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"] Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.367691 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.380582 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.392330 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.427828 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmrqv\" (UniqueName: \"kubernetes.io/projected/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-kube-api-access-mmrqv\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.427884 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ff4f026-1e2a-44a7-848a-ccd5566ece95-config\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.427909 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ff4f026-1e2a-44a7-848a-ccd5566ece95-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.427944 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.427969 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-eba59b9b-8af0-453c-87a8-a7b3651daf02\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-eba59b9b-8af0-453c-87a8-a7b3651daf02\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.427988 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slfsh\" (UniqueName: \"kubernetes.io/projected/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-kube-api-access-slfsh\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428030 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff4f026-1e2a-44a7-848a-ccd5566ece95-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428051 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ff4f026-1e2a-44a7-848a-ccd5566ece95-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428075 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-9baa6ba5-7077-4488-814e-7bbb53e5457b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9baa6ba5-7077-4488-814e-7bbb53e5457b\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428107 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-config\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428124 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428147 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428166 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-config\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428185 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428210 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff4f026-1e2a-44a7-848a-ccd5566ece95-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428225 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428249 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428272 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/0ff4f026-1e2a-44a7-848a-ccd5566ece95-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428288 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428306 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-70060294-0c27-4aa3-a402-8c53022a52e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70060294-0c27-4aa3-a402-8c53022a52e5\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428326 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428342 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428361 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h84nf\" (UniqueName: \"kubernetes.io/projected/0ff4f026-1e2a-44a7-848a-ccd5566ece95-kube-api-access-h84nf\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.428393 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.429195 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ff4f026-1e2a-44a7-848a-ccd5566ece95-config\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.430410 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ff4f026-1e2a-44a7-848a-ccd5566ece95-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.430884 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ff4f026-1e2a-44a7-848a-ccd5566ece95-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " 
pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.438684 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.438777 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9baa6ba5-7077-4488-814e-7bbb53e5457b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9baa6ba5-7077-4488-814e-7bbb53e5457b\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c1f81e9642036de588227ef8a986d9a1b849e794aab12d0e87b9ff8f10dcd5d7/globalmount\"" pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.445999 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff4f026-1e2a-44a7-848a-ccd5566ece95-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.445341 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ff4f026-1e2a-44a7-848a-ccd5566ece95-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.447777 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ff4f026-1e2a-44a7-848a-ccd5566ece95-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.454554 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h84nf\" (UniqueName: \"kubernetes.io/projected/0ff4f026-1e2a-44a7-848a-ccd5566ece95-kube-api-access-h84nf\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.492648 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9baa6ba5-7077-4488-814e-7bbb53e5457b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9baa6ba5-7077-4488-814e-7bbb53e5457b\") pod \"ovsdbserver-nb-0\" (UID: \"0ff4f026-1e2a-44a7-848a-ccd5566ece95\") " pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.530747 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.530916 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.530983 5003 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-70060294-0c27-4aa3-a402-8c53022a52e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70060294-0c27-4aa3-a402-8c53022a52e5\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.531090 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.531153 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.531244 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.531313 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmrqv\" (UniqueName: \"kubernetes.io/projected/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-kube-api-access-mmrqv\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.531499 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.531587 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-eba59b9b-8af0-453c-87a8-a7b3651daf02\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-eba59b9b-8af0-453c-87a8-a7b3651daf02\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.531637 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slfsh\" (UniqueName: \"kubernetes.io/projected/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-kube-api-access-slfsh\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.531744 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-config\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.531883 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-metrics-certs-tls-certs\") pod 
\"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.531954 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.532009 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.532101 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-config\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.532156 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.532541 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.532171 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.532896 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.534212 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-config\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.534484 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-config\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.534965 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-scripts\") pod \"ovsdbserver-nb-2\" (UID: 
\"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.538453 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.538781 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.539080 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.540445 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.542177 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.542720 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.551439 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.551471 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-eba59b9b-8af0-453c-87a8-a7b3651daf02\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-eba59b9b-8af0-453c-87a8-a7b3651daf02\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7ecb8482242d0657acef672ad1fe555c493b0c0f2446c226a6b16a0975e8943c/globalmount\"" pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.551814 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.551875 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-70060294-0c27-4aa3-a402-8c53022a52e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70060294-0c27-4aa3-a402-8c53022a52e5\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/072ee021e2444cc24ff2a7307c2530d9f4e6ac78f0338fbcdbb57ec097972f56/globalmount\"" pod="openstack/ovsdbserver-nb-2"
Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.557601 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmrqv\" (UniqueName: \"kubernetes.io/projected/c62c7c4b-4dd2-40c8-b209-e29c1dfa255f-kube-api-access-mmrqv\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1"
Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.565380 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slfsh\" (UniqueName: \"kubernetes.io/projected/486fc164-58ae-47f8-bcd1-2c98e7c12b8f-kube-api-access-slfsh\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2"
Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.609488 5003 generic.go:334] "Generic (PLEG): container finished" podID="d729952a-6fc6-43dc-96b7-9a387cf92217" containerID="7530188867b264019789cb95e71b0617ac88e5bc91950354c167bfa6da99c22d" exitCode=0
Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.609577 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g26d9" event={"ID":"d729952a-6fc6-43dc-96b7-9a387cf92217","Type":"ContainerDied","Data":"7530188867b264019789cb95e71b0617ac88e5bc91950354c167bfa6da99c22d"}
Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.614570 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.641537 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-70060294-0c27-4aa3-a402-8c53022a52e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-70060294-0c27-4aa3-a402-8c53022a52e5\") pod \"ovsdbserver-nb-2\" (UID: \"486fc164-58ae-47f8-bcd1-2c98e7c12b8f\") " pod="openstack/ovsdbserver-nb-2"
Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.643341 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-eba59b9b-8af0-453c-87a8-a7b3651daf02\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-eba59b9b-8af0-453c-87a8-a7b3651daf02\") pod \"ovsdbserver-nb-1\" (UID: \"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f\") " pod="openstack/ovsdbserver-nb-1"
Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.662024 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1"
Jan 04 13:19:00 crc kubenswrapper[5003]: I0104 13:19:00.691387 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.290870 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2vwnf"]
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.293402 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:01 crc kubenswrapper[5003]: W0104 13:19:01.295069 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ff4f026_1e2a_44a7_848a_ccd5566ece95.slice/crio-84e8a828862666bdfe8412b825c1256feb7946ef28c395f83661d988fbc36956 WatchSource:0}: Error finding container 84e8a828862666bdfe8412b825c1256feb7946ef28c395f83661d988fbc36956: Status 404 returned error can't find the container with id 84e8a828862666bdfe8412b825c1256feb7946ef28c395f83661d988fbc36956
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.309621 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.332162 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2vwnf"]
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.455260 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-utilities\") pod \"redhat-marketplace-2vwnf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") " pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.455616 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncmmz\" (UniqueName: \"kubernetes.io/projected/32653060-ac61-4bb1-aea3-836ee0e777cf-kube-api-access-ncmmz\") pod \"redhat-marketplace-2vwnf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") " pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.455661 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-catalog-content\") pod \"redhat-marketplace-2vwnf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") " pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.493171 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"]
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.506383 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lkcf7"]
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.508387 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.551849 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lkcf7"]
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.569365 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-utilities\") pod \"redhat-operators-lkcf7\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") " pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.569458 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-catalog-content\") pod \"redhat-operators-lkcf7\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") " pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.569503 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-utilities\") pod \"redhat-marketplace-2vwnf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") " pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.569543 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n84k7\" (UniqueName: \"kubernetes.io/projected/b025b58a-ca68-4057-b474-e58ca758c727-kube-api-access-n84k7\") pod \"redhat-operators-lkcf7\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") " pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.569578 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncmmz\" (UniqueName: \"kubernetes.io/projected/32653060-ac61-4bb1-aea3-836ee0e777cf-kube-api-access-ncmmz\") pod \"redhat-marketplace-2vwnf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") " pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.569635 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-catalog-content\") pod \"redhat-marketplace-2vwnf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") " pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.570179 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-catalog-content\") pod \"redhat-marketplace-2vwnf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") " pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.570571 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-utilities\") pod \"redhat-marketplace-2vwnf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") " pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.620838 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncmmz\" (UniqueName: \"kubernetes.io/projected/32653060-ac61-4bb1-aea3-836ee0e777cf-kube-api-access-ncmmz\") pod \"redhat-marketplace-2vwnf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") " pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.635812 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.653970 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g26d9" event={"ID":"d729952a-6fc6-43dc-96b7-9a387cf92217","Type":"ContainerStarted","Data":"699b862039f11a9a09809d8cebbaae3ed8cc190bc22508e6bf8b83d05525a823"}
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.672343 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-utilities\") pod \"redhat-operators-lkcf7\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") " pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.672435 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-catalog-content\") pod \"redhat-operators-lkcf7\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") " pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.672471 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n84k7\" (UniqueName: \"kubernetes.io/projected/b025b58a-ca68-4057-b474-e58ca758c727-kube-api-access-n84k7\") pod \"redhat-operators-lkcf7\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") " pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.673248 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-utilities\") pod \"redhat-operators-lkcf7\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") " pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.673489 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-catalog-content\") pod \"redhat-operators-lkcf7\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") " pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.686417 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0ff4f026-1e2a-44a7-848a-ccd5566ece95","Type":"ContainerStarted","Data":"84e8a828862666bdfe8412b825c1256feb7946ef28c395f83661d988fbc36956"}
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.728584 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n84k7\" (UniqueName: \"kubernetes.io/projected/b025b58a-ca68-4057-b474-e58ca758c727-kube-api-access-n84k7\") pod \"redhat-operators-lkcf7\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") " pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.739960 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"486fc164-58ae-47f8-bcd1-2c98e7c12b8f","Type":"ContainerStarted","Data":"a9207d3abf9ba2017d93dd77eceac4ecd3b38f55985c7ed73fe45443bf1a703a"}
Jan 04 13:19:01 crc kubenswrapper[5003]: I0104 13:19:01.959638 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.018907 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2vwnf"]
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.047851 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"]
Jan 04 13:19:02 crc kubenswrapper[5003]: W0104 13:19:02.074243 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc62c7c4b_4dd2_40c8_b209_e29c1dfa255f.slice/crio-12d284cc840d3c0437b1fde53c37e44ce1285f02b4fabf9eff5f64442328bc96 WatchSource:0}: Error finding container 12d284cc840d3c0437b1fde53c37e44ce1285f02b4fabf9eff5f64442328bc96: Status 404 returned error can't find the container with id 12d284cc840d3c0437b1fde53c37e44ce1285f02b4fabf9eff5f64442328bc96
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.264498 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.266326 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.273719 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-h5vft"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.273950 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.274139 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.274697 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.289659 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"]
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.291519 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.327998 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"]
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.336753 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.356295 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.372940 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"]
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.381733 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"]
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.392404 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfffcf93-0d03-45b0-ad26-ed2c799360f0-config\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.392465 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2tt4\" (UniqueName: \"kubernetes.io/projected/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-kube-api-access-w2tt4\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.392510 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfffcf93-0d03-45b0-ad26-ed2c799360f0-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.392530 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfffcf93-0d03-45b0-ad26-ed2c799360f0-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.392553 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.392609 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.392664 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfffcf93-0d03-45b0-ad26-ed2c799360f0-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.392689 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfffcf93-0d03-45b0-ad26-ed2c799360f0-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.392870 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-57fba318-750f-41c7-8ac7-e99765986da7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-57fba318-750f-41c7-8ac7-e99765986da7\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.392949 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-config\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.393097 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-30b34e39-76d8-475f-b105-3995da5f480c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-30b34e39-76d8-475f-b105-3995da5f480c\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.393139 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.393193 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfffcf93-0d03-45b0-ad26-ed2c799360f0-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.393268 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.393291 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.393319 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5qld\" (UniqueName: \"kubernetes.io/projected/bfffcf93-0d03-45b0-ad26-ed2c799360f0-kube-api-access-h5qld\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.495579 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496005 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfffcf93-0d03-45b0-ad26-ed2c799360f0-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496053 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfffcf93-0d03-45b0-ad26-ed2c799360f0-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496089 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-57fba318-750f-41c7-8ac7-e99765986da7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-57fba318-750f-41c7-8ac7-e99765986da7\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496137 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-config\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496189 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-30b34e39-76d8-475f-b105-3995da5f480c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-30b34e39-76d8-475f-b105-3995da5f480c\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496218 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtchb\" (UniqueName: \"kubernetes.io/projected/9f3c6348-1679-407c-92e7-b8c0afa0591b-kube-api-access-wtchb\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496237 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496282 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfffcf93-0d03-45b0-ad26-ed2c799360f0-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496313 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496348 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496368 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5qld\" (UniqueName: \"kubernetes.io/projected/bfffcf93-0d03-45b0-ad26-ed2c799360f0-kube-api-access-h5qld\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496390 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfffcf93-0d03-45b0-ad26-ed2c799360f0-config\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496424 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f3c6348-1679-407c-92e7-b8c0afa0591b-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496452 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2tt4\" (UniqueName: \"kubernetes.io/projected/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-kube-api-access-w2tt4\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496477 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f3c6348-1679-407c-92e7-b8c0afa0591b-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496511 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9f3c6348-1679-407c-92e7-b8c0afa0591b-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496543 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfffcf93-0d03-45b0-ad26-ed2c799360f0-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.496560 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfffcf93-0d03-45b0-ad26-ed2c799360f0-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.497374 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.497417 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-cbc20b3a-10fb-4b05-9085-bf8e4dff085c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc20b3a-10fb-4b05-9085-bf8e4dff085c\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.497442 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f3c6348-1679-407c-92e7-b8c0afa0591b-config\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.497648 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f3c6348-1679-407c-92e7-b8c0afa0591b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.497674 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f3c6348-1679-407c-92e7-b8c0afa0591b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.499516 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.500228 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfffcf93-0d03-45b0-ad26-ed2c799360f0-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.501636 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfffcf93-0d03-45b0-ad26-ed2c799360f0-config\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.504109 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfffcf93-0d03-45b0-ad26-ed2c799360f0-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.505064 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfffcf93-0d03-45b0-ad26-ed2c799360f0-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.509339 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfffcf93-0d03-45b0-ad26-ed2c799360f0-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.512461 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-config\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.514392 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.516783 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.517511 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.517539 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-30b34e39-76d8-475f-b105-3995da5f480c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-30b34e39-76d8-475f-b105-3995da5f480c\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2aa349bf5a4c96a9e4f6ea3c1070961688c08e334e8cadc5cbfe736d3d96bc06/globalmount\"" pod="openstack/ovsdbserver-sb-2"
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.517630 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.517671 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-57fba318-750f-41c7-8ac7-e99765986da7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-57fba318-750f-41c7-8ac7-e99765986da7\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/22ca2c2814e14a7984915581d6ec7e267d65a864ab4580e97e90976fde55544c/globalmount\"" pod="openstack/ovsdbserver-sb-0" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.521765 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfffcf93-0d03-45b0-ad26-ed2c799360f0-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.524912 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.525627 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.537354 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2tt4\" (UniqueName: \"kubernetes.io/projected/ad42204b-8d7a-415b-a30c-2e8e4fb242d8-kube-api-access-w2tt4\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.538764 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5qld\" (UniqueName: \"kubernetes.io/projected/bfffcf93-0d03-45b0-ad26-ed2c799360f0-kube-api-access-h5qld\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.590635 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-57fba318-750f-41c7-8ac7-e99765986da7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-57fba318-750f-41c7-8ac7-e99765986da7\") pod \"ovsdbserver-sb-0\" (UID: \"bfffcf93-0d03-45b0-ad26-ed2c799360f0\") " pod="openstack/ovsdbserver-sb-0" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.599558 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtchb\" (UniqueName: \"kubernetes.io/projected/9f3c6348-1679-407c-92e7-b8c0afa0591b-kube-api-access-wtchb\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.599654 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f3c6348-1679-407c-92e7-b8c0afa0591b-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 
crc kubenswrapper[5003]: I0104 13:19:02.599693 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f3c6348-1679-407c-92e7-b8c0afa0591b-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.599713 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9f3c6348-1679-407c-92e7-b8c0afa0591b-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.599774 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-cbc20b3a-10fb-4b05-9085-bf8e4dff085c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc20b3a-10fb-4b05-9085-bf8e4dff085c\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.599799 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f3c6348-1679-407c-92e7-b8c0afa0591b-config\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.599826 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f3c6348-1679-407c-92e7-b8c0afa0591b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.599849 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f3c6348-1679-407c-92e7-b8c0afa0591b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.607162 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-30b34e39-76d8-475f-b105-3995da5f480c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-30b34e39-76d8-475f-b105-3995da5f480c\") pod \"ovsdbserver-sb-2\" (UID: \"ad42204b-8d7a-415b-a30c-2e8e4fb242d8\") " pod="openstack/ovsdbserver-sb-2" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.608297 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9f3c6348-1679-407c-92e7-b8c0afa0591b-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.609313 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f3c6348-1679-407c-92e7-b8c0afa0591b-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.612689 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9f3c6348-1679-407c-92e7-b8c0afa0591b-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.613467 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f3c6348-1679-407c-92e7-b8c0afa0591b-config\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.624835 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f3c6348-1679-407c-92e7-b8c0afa0591b-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.629346 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.629386 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-cbc20b3a-10fb-4b05-9085-bf8e4dff085c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc20b3a-10fb-4b05-9085-bf8e4dff085c\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b8a017b6186312567833a2849ed451dd935384b679215fc11acc247aa2c34cbf/globalmount\"" pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.629912 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f3c6348-1679-407c-92e7-b8c0afa0591b-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.632085 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtchb\" (UniqueName: \"kubernetes.io/projected/9f3c6348-1679-407c-92e7-b8c0afa0591b-kube-api-access-wtchb\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.664583 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.672532 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-cbc20b3a-10fb-4b05-9085-bf8e4dff085c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbc20b3a-10fb-4b05-9085-bf8e4dff085c\") pod \"ovsdbserver-sb-1\" (UID: \"9f3c6348-1679-407c-92e7-b8c0afa0591b\") " pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.682072 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.705843 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.782889 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"486fc164-58ae-47f8-bcd1-2c98e7c12b8f","Type":"ContainerStarted","Data":"de078ab9c5f61046dab8d6435cf5e6b6bb261272b601b97aab7e9b458f07461e"} Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.788065 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"486fc164-58ae-47f8-bcd1-2c98e7c12b8f","Type":"ContainerStarted","Data":"f5960fad3db16f32f746d1798162b6e2a04e610a0cc9acf6ad1f26431708564d"} Jan 04 13:19:02 crc kubenswrapper[5003]: W0104 13:19:02.792872 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb025b58a_ca68_4057_b474_e58ca758c727.slice/crio-6043afb4160d0975bf7492841e1f730a1f3816cd1d4c4e537839312d3ebf3345 WatchSource:0}: Error finding container 6043afb4160d0975bf7492841e1f730a1f3816cd1d4c4e537839312d3ebf3345: Status 404 returned error can't find the container with id 6043afb4160d0975bf7492841e1f730a1f3816cd1d4c4e537839312d3ebf3345 Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.797467 5003 generic.go:334] "Generic (PLEG): container finished" podID="d729952a-6fc6-43dc-96b7-9a387cf92217" containerID="699b862039f11a9a09809d8cebbaae3ed8cc190bc22508e6bf8b83d05525a823" exitCode=0 Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.797526 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g26d9" event={"ID":"d729952a-6fc6-43dc-96b7-9a387cf92217","Type":"ContainerDied","Data":"699b862039f11a9a09809d8cebbaae3ed8cc190bc22508e6bf8b83d05525a823"} Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.799233 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lkcf7"] Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.809600 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=3.809579104 podStartE2EDuration="3.809579104s" podCreationTimestamp="2026-01-04 13:18:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:02.809266166 +0000 UTC m=+5458.282296027" watchObservedRunningTime="2026-01-04 13:19:02.809579104 +0000 UTC m=+5458.282608945" Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.860762 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f","Type":"ContainerStarted","Data":"f29bef9a2d9a8d4259cc008ebe1bc931ce9b45fa8c4a699079dd8d789062f808"} Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.860805 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f","Type":"ContainerStarted","Data":"12d284cc840d3c0437b1fde53c37e44ce1285f02b4fabf9eff5f64442328bc96"} Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.860815 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"0ff4f026-1e2a-44a7-848a-ccd5566ece95","Type":"ContainerStarted","Data":"b6a0dfb7acc9c9de37416a8ffad0a1590b7aae5622fbec27be07cc73b7b76833"} Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.860826 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-nb-0" event={"ID":"0ff4f026-1e2a-44a7-848a-ccd5566ece95","Type":"ContainerStarted","Data":"e4c90c3017c6d6c7fbd546902f0fd2dc62d4c230bf7679a7791823dae6584170"} Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.869289 5003 generic.go:334] "Generic (PLEG): container finished" podID="32653060-ac61-4bb1-aea3-836ee0e777cf" containerID="a2a9887b514c070ce3ed6faff8b685c695e3ad1bd22e8c70468e23cbea93f9b5" exitCode=0 Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.869376 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2vwnf" event={"ID":"32653060-ac61-4bb1-aea3-836ee0e777cf","Type":"ContainerDied","Data":"a2a9887b514c070ce3ed6faff8b685c695e3ad1bd22e8c70468e23cbea93f9b5"} Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.869413 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2vwnf" event={"ID":"32653060-ac61-4bb1-aea3-836ee0e777cf","Type":"ContainerStarted","Data":"8504d787a3eb52a5beb5ada22cfa082deaa6fb93d6b1914f93c3bee776401b4d"} Jan 04 13:19:02 crc kubenswrapper[5003]: I0104 13:19:02.892577 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=3.8925517530000002 podStartE2EDuration="3.892551753s" podCreationTimestamp="2026-01-04 13:18:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:02.883347899 +0000 UTC m=+5458.356377750" watchObservedRunningTime="2026-01-04 13:19:02.892551753 +0000 UTC m=+5458.365581594" Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.303244 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.417963 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.565855 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.617095 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.691512 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.905051 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"c62c7c4b-4dd2-40c8-b209-e29c1dfa255f","Type":"ContainerStarted","Data":"cfa38b45b5c22199647087f1eeea2cfe12f48d82c30501ed14c610c9f04c9992"} Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.909444 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"9f3c6348-1679-407c-92e7-b8c0afa0591b","Type":"ContainerStarted","Data":"e93a164198d0651b0037542bb44aa16ee9ab6581dea1b4a495828685e47f128e"} Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.909505 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"9f3c6348-1679-407c-92e7-b8c0afa0591b","Type":"ContainerStarted","Data":"c4a78ed5527f3a7ca3e8c195640c1a61dd116d98017697afa0dd0e2f03e1ab58"} Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.916660 5003 generic.go:334] "Generic (PLEG): container finished" podID="b025b58a-ca68-4057-b474-e58ca758c727" 
containerID="0e34f10b6a614f8be35059aa13204147ff2b89620d9a74cc026712a2c7dc78a0" exitCode=0 Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.916725 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkcf7" event={"ID":"b025b58a-ca68-4057-b474-e58ca758c727","Type":"ContainerDied","Data":"0e34f10b6a614f8be35059aa13204147ff2b89620d9a74cc026712a2c7dc78a0"} Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.916746 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkcf7" event={"ID":"b025b58a-ca68-4057-b474-e58ca758c727","Type":"ContainerStarted","Data":"6043afb4160d0975bf7492841e1f730a1f3816cd1d4c4e537839312d3ebf3345"} Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.944805 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=4.944772843 podStartE2EDuration="4.944772843s" podCreationTimestamp="2026-01-04 13:18:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:03.934681036 +0000 UTC m=+5459.407710877" watchObservedRunningTime="2026-01-04 13:19:03.944772843 +0000 UTC m=+5459.417802694" Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.948364 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"ad42204b-8d7a-415b-a30c-2e8e4fb242d8","Type":"ContainerStarted","Data":"a68f6c08a9b55ef0e565592499cf585b03800c4a8c32e04b8f7e22f6895ff433"} Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.948434 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"ad42204b-8d7a-415b-a30c-2e8e4fb242d8","Type":"ContainerStarted","Data":"4e5e0d0ba51df44feb2a8db922f2eae072d55666c6fe578782c5c5f865935fdb"} Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.955495 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"bfffcf93-0d03-45b0-ad26-ed2c799360f0","Type":"ContainerStarted","Data":"630bd6ffc24c82a2a771fef6b28d25c8e8f40e2e4cc8b3c15b47e9fe33a78661"} Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.955566 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"bfffcf93-0d03-45b0-ad26-ed2c799360f0","Type":"ContainerStarted","Data":"82b70d4c9500416db634e4b90de81ef12142e6e056eb21c324c3ae3a7d57a75e"} Jan 04 13:19:03 crc kubenswrapper[5003]: I0104 13:19:03.981692 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g26d9" event={"ID":"d729952a-6fc6-43dc-96b7-9a387cf92217","Type":"ContainerStarted","Data":"9b98ea71c7c93057826eec8ce2f64beba4ab4005ffceb8ceb38e66d4a3435aa7"} Jan 04 13:19:04 crc kubenswrapper[5003]: I0104 13:19:04.003975 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g26d9" podStartSLOduration=3.374218559 podStartE2EDuration="6.003939111s" podCreationTimestamp="2026-01-04 13:18:58 +0000 UTC" firstStartedPulling="2026-01-04 13:19:00.612957278 +0000 UTC m=+5456.085987149" lastFinishedPulling="2026-01-04 13:19:03.24267786 +0000 UTC m=+5458.715707701" observedRunningTime="2026-01-04 13:19:04.002330358 +0000 UTC m=+5459.475360199" watchObservedRunningTime="2026-01-04 13:19:04.003939111 +0000 UTC m=+5459.476968952" Jan 04 13:19:04 crc kubenswrapper[5003]: I0104 13:19:04.991637 5003 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"bfffcf93-0d03-45b0-ad26-ed2c799360f0","Type":"ContainerStarted","Data":"b5f5084789a152ca5d9c162e9929403a62c469f83f1d9cd9c9703f6ed96a2acf"} Jan 04 13:19:04 crc kubenswrapper[5003]: I0104 13:19:04.995139 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"9f3c6348-1679-407c-92e7-b8c0afa0591b","Type":"ContainerStarted","Data":"98550a77ae16b96027f9ddfcadf56e51a6ad2a6c42c50f2fc718946cbb40f943"} Jan 04 13:19:04 crc kubenswrapper[5003]: I0104 13:19:04.997295 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkcf7" event={"ID":"b025b58a-ca68-4057-b474-e58ca758c727","Type":"ContainerStarted","Data":"b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a"} Jan 04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.000094 5003 generic.go:334] "Generic (PLEG): container finished" podID="32653060-ac61-4bb1-aea3-836ee0e777cf" containerID="8c1d0154f9dfed67d7187c2c639352e2b94fe3f323e38c9e9c9e6fde56e79cec" exitCode=0 Jan 04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.000206 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2vwnf" event={"ID":"32653060-ac61-4bb1-aea3-836ee0e777cf","Type":"ContainerDied","Data":"8c1d0154f9dfed67d7187c2c639352e2b94fe3f323e38c9e9c9e6fde56e79cec"} Jan 04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.005570 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"ad42204b-8d7a-415b-a30c-2e8e4fb242d8","Type":"ContainerStarted","Data":"d3e8726f6cafca5e43993e24c49797b7847f42f3cc543199be30985d13afb94c"} Jan 04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.027519 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.027495113 podStartE2EDuration="4.027495113s" podCreationTimestamp="2026-01-04 13:19:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:05.021445563 +0000 UTC m=+5460.494475404" watchObservedRunningTime="2026-01-04 13:19:05.027495113 +0000 UTC m=+5460.500524954" Jan 04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.053792 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=4.053758969 podStartE2EDuration="4.053758969s" podCreationTimestamp="2026-01-04 13:19:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:05.05040675 +0000 UTC m=+5460.523436591" watchObservedRunningTime="2026-01-04 13:19:05.053758969 +0000 UTC m=+5460.526788820" Jan 04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.106249 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=4.106222279 podStartE2EDuration="4.106222279s" podCreationTimestamp="2026-01-04 13:19:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:05.069588488 +0000 UTC m=+5460.542618339" watchObservedRunningTime="2026-01-04 13:19:05.106222279 +0000 UTC m=+5460.579252130" Jan 04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.618934 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Jan 
04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.662472 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.665771 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Jan 04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.682900 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Jan 04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.691948 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:05 crc kubenswrapper[5003]: I0104 13:19:05.707066 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:06 crc kubenswrapper[5003]: I0104 13:19:06.017839 5003 generic.go:334] "Generic (PLEG): container finished" podID="b025b58a-ca68-4057-b474-e58ca758c727" containerID="b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a" exitCode=0 Jan 04 13:19:06 crc kubenswrapper[5003]: I0104 13:19:06.017945 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkcf7" event={"ID":"b025b58a-ca68-4057-b474-e58ca758c727","Type":"ContainerDied","Data":"b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a"} Jan 04 13:19:06 crc kubenswrapper[5003]: I0104 13:19:06.022389 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2vwnf" event={"ID":"32653060-ac61-4bb1-aea3-836ee0e777cf","Type":"ContainerStarted","Data":"3f05a8daaa10ee961963f9d44213353d814d2fa7fa196b2c1992e20dfb045a73"} Jan 04 13:19:06 crc kubenswrapper[5003]: I0104 13:19:06.658831 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:06 crc kubenswrapper[5003]: I0104 13:19:06.662819 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:06 crc kubenswrapper[5003]: I0104 13:19:06.686144 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2vwnf" podStartSLOduration=2.9786077410000003 podStartE2EDuration="5.686117683s" podCreationTimestamp="2026-01-04 13:19:01 +0000 UTC" firstStartedPulling="2026-01-04 13:19:02.874238908 +0000 UTC m=+5458.347268739" lastFinishedPulling="2026-01-04 13:19:05.58174884 +0000 UTC m=+5461.054778681" observedRunningTime="2026-01-04 13:19:06.076470799 +0000 UTC m=+5461.549500650" watchObservedRunningTime="2026-01-04 13:19:06.686117683 +0000 UTC m=+5462.159147544" Jan 04 13:19:06 crc kubenswrapper[5003]: I0104 13:19:06.723121 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:06 crc kubenswrapper[5003]: I0104 13:19:06.739079 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.093270 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.095668 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.095925 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/ovsdbserver-nb-2" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.371645 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6dc945dddc-q2vlz"] Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.374642 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.376888 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.393485 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dc945dddc-q2vlz"] Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.460943 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-dns-svc\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.461158 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-ovsdbserver-nb\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.461247 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfkjq\" (UniqueName: \"kubernetes.io/projected/5ae3db07-4e12-496a-b370-2e61ae0de2d0-kube-api-access-zfkjq\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.461321 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-config\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.562866 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfkjq\" (UniqueName: \"kubernetes.io/projected/5ae3db07-4e12-496a-b370-2e61ae0de2d0-kube-api-access-zfkjq\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.563194 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-config\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.563334 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-dns-svc\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.563428 5003 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-ovsdbserver-nb\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.564403 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-ovsdbserver-nb\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.565382 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-config\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.565953 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-dns-svc\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.589070 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfkjq\" (UniqueName: \"kubernetes.io/projected/5ae3db07-4e12-496a-b370-2e61ae0de2d0-kube-api-access-zfkjq\") pod \"dnsmasq-dns-6dc945dddc-q2vlz\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.665573 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.682695 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.692094 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:07 crc kubenswrapper[5003]: I0104 13:19:07.706580 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:08 crc kubenswrapper[5003]: I0104 13:19:08.029974 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6dc945dddc-q2vlz"] Jan 04 13:19:08 crc kubenswrapper[5003]: I0104 13:19:08.055390 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkcf7" event={"ID":"b025b58a-ca68-4057-b474-e58ca758c727","Type":"ContainerStarted","Data":"19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac"} Jan 04 13:19:08 crc kubenswrapper[5003]: I0104 13:19:08.095605 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lkcf7" podStartSLOduration=4.29686466 podStartE2EDuration="7.09558259s" podCreationTimestamp="2026-01-04 13:19:01 +0000 UTC" firstStartedPulling="2026-01-04 13:19:03.923626963 +0000 UTC m=+5459.396656804" lastFinishedPulling="2026-01-04 13:19:06.722344893 +0000 UTC m=+5462.195374734" observedRunningTime="2026-01-04 13:19:08.090292169 +0000 UTC m=+5463.563322010" watchObservedRunningTime="2026-01-04 13:19:08.09558259 +0000 UTC m=+5463.568612421" Jan 04 13:19:08 crc kubenswrapper[5003]: I0104 13:19:08.710308 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Jan 04 13:19:08 crc kubenswrapper[5003]: I0104 13:19:08.776283 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Jan 04 13:19:08 crc kubenswrapper[5003]: I0104 13:19:08.804182 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:08 crc kubenswrapper[5003]: I0104 13:19:08.875736 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:19:08 crc kubenswrapper[5003]: I0104 13:19:08.875814 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.014204 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.069728 5003 generic.go:334] "Generic (PLEG): container finished" podID="5ae3db07-4e12-496a-b370-2e61ae0de2d0" containerID="cade5910d25186f40d7ff1353a7d29dbbf8093d45ac74d9518df22ea5fc31e74" exitCode=0 Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.070474 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" event={"ID":"5ae3db07-4e12-496a-b370-2e61ae0de2d0","Type":"ContainerDied","Data":"cade5910d25186f40d7ff1353a7d29dbbf8093d45ac74d9518df22ea5fc31e74"} Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.070543 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" event={"ID":"5ae3db07-4e12-496a-b370-2e61ae0de2d0","Type":"ContainerStarted","Data":"50b32680ce35cc71317b7f80abe2e8ea6a12373941b7520ea4746f5182a30880"} Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.145891 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.155334 5003 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.156275 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.163862 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g26d9" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.403966 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dc945dddc-q2vlz"] Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.418841 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.418969 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.424221 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-659c7d5767-9tcl7"] Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.425893 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.430994 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.437581 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-659c7d5767-9tcl7"] Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.506104 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-dns-svc\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.506178 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-config\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.506210 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwzxp\" (UniqueName: \"kubernetes.io/projected/d821cd2d-e303-46e8-ae8f-fd92653ec465-kube-api-access-kwzxp\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.506742 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-nb\") pod 
\"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.507211 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-sb\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.609440 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-nb\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.609524 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-sb\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.609579 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-dns-svc\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.609624 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-config\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.609656 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwzxp\" (UniqueName: \"kubernetes.io/projected/d821cd2d-e303-46e8-ae8f-fd92653ec465-kube-api-access-kwzxp\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.610327 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-nb\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.610651 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-sb\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.610711 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-config\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " 
pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.611609 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-dns-svc\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.640225 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwzxp\" (UniqueName: \"kubernetes.io/projected/d821cd2d-e303-46e8-ae8f-fd92653ec465-kube-api-access-kwzxp\") pod \"dnsmasq-dns-659c7d5767-9tcl7\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:09 crc kubenswrapper[5003]: I0104 13:19:09.758182 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:10 crc kubenswrapper[5003]: I0104 13:19:10.086633 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" event={"ID":"5ae3db07-4e12-496a-b370-2e61ae0de2d0","Type":"ContainerStarted","Data":"ac7aa467c8e88e3b9b97401530e083a015793b9c259d1e569e4675b36c48c71b"} Jan 04 13:19:10 crc kubenswrapper[5003]: I0104 13:19:10.087930 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" podUID="5ae3db07-4e12-496a-b370-2e61ae0de2d0" containerName="dnsmasq-dns" containerID="cri-o://ac7aa467c8e88e3b9b97401530e083a015793b9c259d1e569e4675b36c48c71b" gracePeriod=10 Jan 04 13:19:10 crc kubenswrapper[5003]: I0104 13:19:10.088255 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:10 crc kubenswrapper[5003]: I0104 13:19:10.112122 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" podStartSLOduration=3.112083002 podStartE2EDuration="3.112083002s" podCreationTimestamp="2026-01-04 13:19:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:10.109604387 +0000 UTC m=+5465.582634228" watchObservedRunningTime="2026-01-04 13:19:10.112083002 +0000 UTC m=+5465.585112853" Jan 04 13:19:10 crc kubenswrapper[5003]: I0104 13:19:10.287614 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-659c7d5767-9tcl7"] Jan 04 13:19:10 crc kubenswrapper[5003]: W0104 13:19:10.290920 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd821cd2d_e303_46e8_ae8f_fd92653ec465.slice/crio-6504ffb3bf31d0b3b364c4fe2eb181590cbfa3db5e5044f7882257b4af7fc857 WatchSource:0}: Error finding container 6504ffb3bf31d0b3b364c4fe2eb181590cbfa3db5e5044f7882257b4af7fc857: Status 404 returned error can't find the container with id 6504ffb3bf31d0b3b364c4fe2eb181590cbfa3db5e5044f7882257b4af7fc857 Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.083540 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g26d9"] Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.126502 5003 generic.go:334] "Generic (PLEG): container finished" podID="5ae3db07-4e12-496a-b370-2e61ae0de2d0" containerID="ac7aa467c8e88e3b9b97401530e083a015793b9c259d1e569e4675b36c48c71b" exitCode=0 Jan 04 
13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.126623 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" event={"ID":"5ae3db07-4e12-496a-b370-2e61ae0de2d0","Type":"ContainerDied","Data":"ac7aa467c8e88e3b9b97401530e083a015793b9c259d1e569e4675b36c48c71b"} Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.131652 5003 generic.go:334] "Generic (PLEG): container finished" podID="d821cd2d-e303-46e8-ae8f-fd92653ec465" containerID="67e308eb51235fed3bd521028cc28bda96693cc9eaffff14d725482032ffaf4b" exitCode=0 Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.132000 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g26d9" podUID="d729952a-6fc6-43dc-96b7-9a387cf92217" containerName="registry-server" containerID="cri-o://9b98ea71c7c93057826eec8ce2f64beba4ab4005ffceb8ceb38e66d4a3435aa7" gracePeriod=2 Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.134250 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" event={"ID":"d821cd2d-e303-46e8-ae8f-fd92653ec465","Type":"ContainerDied","Data":"67e308eb51235fed3bd521028cc28bda96693cc9eaffff14d725482032ffaf4b"} Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.134402 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" event={"ID":"d821cd2d-e303-46e8-ae8f-fd92653ec465","Type":"ContainerStarted","Data":"6504ffb3bf31d0b3b364c4fe2eb181590cbfa3db5e5044f7882257b4af7fc857"} Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.382448 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.454161 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-dns-svc\") pod \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.454271 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-config\") pod \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.454371 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-ovsdbserver-nb\") pod \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.454406 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfkjq\" (UniqueName: \"kubernetes.io/projected/5ae3db07-4e12-496a-b370-2e61ae0de2d0-kube-api-access-zfkjq\") pod \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\" (UID: \"5ae3db07-4e12-496a-b370-2e61ae0de2d0\") " Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.463429 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ae3db07-4e12-496a-b370-2e61ae0de2d0-kube-api-access-zfkjq" (OuterVolumeSpecName: "kube-api-access-zfkjq") pod "5ae3db07-4e12-496a-b370-2e61ae0de2d0" (UID: "5ae3db07-4e12-496a-b370-2e61ae0de2d0"). 
InnerVolumeSpecName "kube-api-access-zfkjq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.503837 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5ae3db07-4e12-496a-b370-2e61ae0de2d0" (UID: "5ae3db07-4e12-496a-b370-2e61ae0de2d0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.504345 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5ae3db07-4e12-496a-b370-2e61ae0de2d0" (UID: "5ae3db07-4e12-496a-b370-2e61ae0de2d0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.518622 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-config" (OuterVolumeSpecName: "config") pod "5ae3db07-4e12-496a-b370-2e61ae0de2d0" (UID: "5ae3db07-4e12-496a-b370-2e61ae0de2d0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.559898 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-config\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.559948 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.559967 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfkjq\" (UniqueName: \"kubernetes.io/projected/5ae3db07-4e12-496a-b370-2e61ae0de2d0-kube-api-access-zfkjq\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.559977 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5ae3db07-4e12-496a-b370-2e61ae0de2d0-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.636751 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.636802 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.704649 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.960319 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.960679 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.995087 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"]
Jan 04 13:19:11 crc kubenswrapper[5003]: E0104 13:19:11.995415 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ae3db07-4e12-496a-b370-2e61ae0de2d0" containerName="init"
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.995430 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ae3db07-4e12-496a-b370-2e61ae0de2d0" containerName="init"
Jan 04 13:19:11 crc kubenswrapper[5003]: E0104 13:19:11.995453 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ae3db07-4e12-496a-b370-2e61ae0de2d0" containerName="dnsmasq-dns"
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.995459 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ae3db07-4e12-496a-b370-2e61ae0de2d0" containerName="dnsmasq-dns"
Jan 04 13:19:11 crc kubenswrapper[5003]: I0104 13:19:11.995606 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ae3db07-4e12-496a-b370-2e61ae0de2d0" containerName="dnsmasq-dns"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.002107 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.005891 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.020146 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.067132 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtdjl\" (UniqueName: \"kubernetes.io/projected/b82c43c6-27e4-4e10-a312-096301b0e50f-kube-api-access-xtdjl\") pod \"ovn-copy-data\" (UID: \"b82c43c6-27e4-4e10-a312-096301b0e50f\") " pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.067212 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/b82c43c6-27e4-4e10-a312-096301b0e50f-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"b82c43c6-27e4-4e10-a312-096301b0e50f\") " pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.067246 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-cf5ebca9-cd0a-4ae7-9866-987f06d20e15\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cf5ebca9-cd0a-4ae7-9866-987f06d20e15\") pod \"ovn-copy-data\" (UID: \"b82c43c6-27e4-4e10-a312-096301b0e50f\") " pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.146782 5003 generic.go:334] "Generic (PLEG): container finished" podID="d729952a-6fc6-43dc-96b7-9a387cf92217" containerID="9b98ea71c7c93057826eec8ce2f64beba4ab4005ffceb8ceb38e66d4a3435aa7" exitCode=0
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.146877 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g26d9" event={"ID":"d729952a-6fc6-43dc-96b7-9a387cf92217","Type":"ContainerDied","Data":"9b98ea71c7c93057826eec8ce2f64beba4ab4005ffceb8ceb38e66d4a3435aa7"}
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.149459 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz" event={"ID":"5ae3db07-4e12-496a-b370-2e61ae0de2d0","Type":"ContainerDied","Data":"50b32680ce35cc71317b7f80abe2e8ea6a12373941b7520ea4746f5182a30880"}
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.149505 5003 scope.go:117] "RemoveContainer" containerID="ac7aa467c8e88e3b9b97401530e083a015793b9c259d1e569e4675b36c48c71b"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.149526 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6dc945dddc-q2vlz"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.154260 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" event={"ID":"d821cd2d-e303-46e8-ae8f-fd92653ec465","Type":"ContainerStarted","Data":"83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0"}
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.155380 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.169433 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtdjl\" (UniqueName: \"kubernetes.io/projected/b82c43c6-27e4-4e10-a312-096301b0e50f-kube-api-access-xtdjl\") pod \"ovn-copy-data\" (UID: \"b82c43c6-27e4-4e10-a312-096301b0e50f\") " pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.169557 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/b82c43c6-27e4-4e10-a312-096301b0e50f-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"b82c43c6-27e4-4e10-a312-096301b0e50f\") " pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.169605 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-cf5ebca9-cd0a-4ae7-9866-987f06d20e15\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cf5ebca9-cd0a-4ae7-9866-987f06d20e15\") pod \"ovn-copy-data\" (UID: \"b82c43c6-27e4-4e10-a312-096301b0e50f\") " pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.178186 5003 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.178237 5003 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-cf5ebca9-cd0a-4ae7-9866-987f06d20e15\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cf5ebca9-cd0a-4ae7-9866-987f06d20e15\") pod \"ovn-copy-data\" (UID: \"b82c43c6-27e4-4e10-a312-096301b0e50f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/756fec2ae6dbec4a6d42caf2fdd53ca8b929ff18ed338e05cb23724c4e97e8c0/globalmount\"" pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.180115 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/b82c43c6-27e4-4e10-a312-096301b0e50f-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"b82c43c6-27e4-4e10-a312-096301b0e50f\") " pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.189278 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" podStartSLOduration=3.189249092 podStartE2EDuration="3.189249092s" podCreationTimestamp="2026-01-04 13:19:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:12.185598175 +0000 UTC m=+5467.658628026" watchObservedRunningTime="2026-01-04 13:19:12.189249092 +0000 UTC m=+5467.662278933"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.195181 5003 scope.go:117] "RemoveContainer" containerID="cade5910d25186f40d7ff1353a7d29dbbf8093d45ac74d9518df22ea5fc31e74"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.195831 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtdjl\" (UniqueName: \"kubernetes.io/projected/b82c43c6-27e4-4e10-a312-096301b0e50f-kube-api-access-xtdjl\") pod \"ovn-copy-data\" (UID: \"b82c43c6-27e4-4e10-a312-096301b0e50f\") " pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.220143 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6dc945dddc-q2vlz"]
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.220298 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.225819 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6dc945dddc-q2vlz"]
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.235033 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-cf5ebca9-cd0a-4ae7-9866-987f06d20e15\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cf5ebca9-cd0a-4ae7-9866-987f06d20e15\") pod \"ovn-copy-data\" (UID: \"b82c43c6-27e4-4e10-a312-096301b0e50f\") " pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.323307 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.702771 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g26d9"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.818281 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ae3db07-4e12-496a-b370-2e61ae0de2d0" path="/var/lib/kubelet/pods/5ae3db07-4e12-496a-b370-2e61ae0de2d0/volumes"
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.883376 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-utilities\") pod \"d729952a-6fc6-43dc-96b7-9a387cf92217\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") "
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.884003 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-catalog-content\") pod \"d729952a-6fc6-43dc-96b7-9a387cf92217\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") "
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.884064 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx9pp\" (UniqueName: \"kubernetes.io/projected/d729952a-6fc6-43dc-96b7-9a387cf92217-kube-api-access-jx9pp\") pod \"d729952a-6fc6-43dc-96b7-9a387cf92217\" (UID: \"d729952a-6fc6-43dc-96b7-9a387cf92217\") "
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.884728 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-utilities" (OuterVolumeSpecName: "utilities") pod "d729952a-6fc6-43dc-96b7-9a387cf92217" (UID: "d729952a-6fc6-43dc-96b7-9a387cf92217"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.886299 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.893869 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d729952a-6fc6-43dc-96b7-9a387cf92217-kube-api-access-jx9pp" (OuterVolumeSpecName: "kube-api-access-jx9pp") pod "d729952a-6fc6-43dc-96b7-9a387cf92217" (UID: "d729952a-6fc6-43dc-96b7-9a387cf92217"). InnerVolumeSpecName "kube-api-access-jx9pp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.932180 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d729952a-6fc6-43dc-96b7-9a387cf92217" (UID: "d729952a-6fc6-43dc-96b7-9a387cf92217"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.958210 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Jan 04 13:19:12 crc kubenswrapper[5003]: W0104 13:19:12.960172 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb82c43c6_27e4_4e10_a312_096301b0e50f.slice/crio-42d247701895dbf93ce63730233cab089737674ec4dc0c6bf360c6c2abc0bcb6 WatchSource:0}: Error finding container 42d247701895dbf93ce63730233cab089737674ec4dc0c6bf360c6c2abc0bcb6: Status 404 returned error can't find the container with id 42d247701895dbf93ce63730233cab089737674ec4dc0c6bf360c6c2abc0bcb6
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.988327 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d729952a-6fc6-43dc-96b7-9a387cf92217-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:12 crc kubenswrapper[5003]: I0104 13:19:12.988383 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx9pp\" (UniqueName: \"kubernetes.io/projected/d729952a-6fc6-43dc-96b7-9a387cf92217-kube-api-access-jx9pp\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:13 crc kubenswrapper[5003]: I0104 13:19:13.028280 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lkcf7" podUID="b025b58a-ca68-4057-b474-e58ca758c727" containerName="registry-server" probeResult="failure" output=<
Jan 04 13:19:13 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s
Jan 04 13:19:13 crc kubenswrapper[5003]: >
Jan 04 13:19:13 crc kubenswrapper[5003]: I0104 13:19:13.173820 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g26d9" event={"ID":"d729952a-6fc6-43dc-96b7-9a387cf92217","Type":"ContainerDied","Data":"12714cbe38dd392d48d5fba1251d6b6d1e1bcd1bb8a16ebf0b10cc90457e6433"}
Jan 04 13:19:13 crc kubenswrapper[5003]: I0104 13:19:13.173907 5003 scope.go:117] "RemoveContainer" containerID="9b98ea71c7c93057826eec8ce2f64beba4ab4005ffceb8ceb38e66d4a3435aa7"
Jan 04 13:19:13 crc kubenswrapper[5003]: I0104 13:19:13.174120 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g26d9"
Jan 04 13:19:13 crc kubenswrapper[5003]: I0104 13:19:13.181396 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"b82c43c6-27e4-4e10-a312-096301b0e50f","Type":"ContainerStarted","Data":"42d247701895dbf93ce63730233cab089737674ec4dc0c6bf360c6c2abc0bcb6"}
Jan 04 13:19:13 crc kubenswrapper[5003]: I0104 13:19:13.222302 5003 scope.go:117] "RemoveContainer" containerID="699b862039f11a9a09809d8cebbaae3ed8cc190bc22508e6bf8b83d05525a823"
Jan 04 13:19:13 crc kubenswrapper[5003]: I0104 13:19:13.238098 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g26d9"]
Jan 04 13:19:13 crc kubenswrapper[5003]: I0104 13:19:13.254429 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g26d9"]
Jan 04 13:19:13 crc kubenswrapper[5003]: I0104 13:19:13.255191 5003 scope.go:117] "RemoveContainer" containerID="7530188867b264019789cb95e71b0617ac88e5bc91950354c167bfa6da99c22d"
Jan 04 13:19:14 crc kubenswrapper[5003]: I0104 13:19:14.079181 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2vwnf"]
Jan 04 13:19:14 crc kubenswrapper[5003]: I0104 13:19:14.199535 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"b82c43c6-27e4-4e10-a312-096301b0e50f","Type":"ContainerStarted","Data":"c3a0677e5a16f154ecd3877c40a7f840b5b64c17cf7ad5187e0e35e7581cc91b"}
Jan 04 13:19:14 crc kubenswrapper[5003]: I0104 13:19:14.199862 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2vwnf" podUID="32653060-ac61-4bb1-aea3-836ee0e777cf" containerName="registry-server" containerID="cri-o://3f05a8daaa10ee961963f9d44213353d814d2fa7fa196b2c1992e20dfb045a73" gracePeriod=2
Jan 04 13:19:14 crc kubenswrapper[5003]: I0104 13:19:14.737820 5003 scope.go:117] "RemoveContainer" containerID="9dc63ccc6b38483c46725558dec45fa17a62b8b20968bbd43dd9242a22a34c33"
Jan 04 13:19:14 crc kubenswrapper[5003]: I0104 13:19:14.822269 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d729952a-6fc6-43dc-96b7-9a387cf92217" path="/var/lib/kubelet/pods/d729952a-6fc6-43dc-96b7-9a387cf92217/volumes"
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.216038 5003 generic.go:334] "Generic (PLEG): container finished" podID="32653060-ac61-4bb1-aea3-836ee0e777cf" containerID="3f05a8daaa10ee961963f9d44213353d814d2fa7fa196b2c1992e20dfb045a73" exitCode=0
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.216142 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2vwnf" event={"ID":"32653060-ac61-4bb1-aea3-836ee0e777cf","Type":"ContainerDied","Data":"3f05a8daaa10ee961963f9d44213353d814d2fa7fa196b2c1992e20dfb045a73"}
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.216203 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2vwnf" event={"ID":"32653060-ac61-4bb1-aea3-836ee0e777cf","Type":"ContainerDied","Data":"8504d787a3eb52a5beb5ada22cfa082deaa6fb93d6b1914f93c3bee776401b4d"}
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.216223 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8504d787a3eb52a5beb5ada22cfa082deaa6fb93d6b1914f93c3bee776401b4d"
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.244317 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.270965 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=4.612932283 podStartE2EDuration="5.270930149s" podCreationTimestamp="2026-01-04 13:19:10 +0000 UTC" firstStartedPulling="2026-01-04 13:19:12.962432189 +0000 UTC m=+5468.435462040" lastFinishedPulling="2026-01-04 13:19:13.620430065 +0000 UTC m=+5469.093459906" observedRunningTime="2026-01-04 13:19:14.23053601 +0000 UTC m=+5469.703565891" watchObservedRunningTime="2026-01-04 13:19:15.270930149 +0000 UTC m=+5470.743959990"
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.359827 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncmmz\" (UniqueName: \"kubernetes.io/projected/32653060-ac61-4bb1-aea3-836ee0e777cf-kube-api-access-ncmmz\") pod \"32653060-ac61-4bb1-aea3-836ee0e777cf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") "
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.361220 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-utilities\") pod \"32653060-ac61-4bb1-aea3-836ee0e777cf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") "
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.361274 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-catalog-content\") pod \"32653060-ac61-4bb1-aea3-836ee0e777cf\" (UID: \"32653060-ac61-4bb1-aea3-836ee0e777cf\") "
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.362331 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-utilities" (OuterVolumeSpecName: "utilities") pod "32653060-ac61-4bb1-aea3-836ee0e777cf" (UID: "32653060-ac61-4bb1-aea3-836ee0e777cf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.370184 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32653060-ac61-4bb1-aea3-836ee0e777cf-kube-api-access-ncmmz" (OuterVolumeSpecName: "kube-api-access-ncmmz") pod "32653060-ac61-4bb1-aea3-836ee0e777cf" (UID: "32653060-ac61-4bb1-aea3-836ee0e777cf"). InnerVolumeSpecName "kube-api-access-ncmmz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.392403 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "32653060-ac61-4bb1-aea3-836ee0e777cf" (UID: "32653060-ac61-4bb1-aea3-836ee0e777cf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.463550 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncmmz\" (UniqueName: \"kubernetes.io/projected/32653060-ac61-4bb1-aea3-836ee0e777cf-kube-api-access-ncmmz\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.463602 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:15 crc kubenswrapper[5003]: I0104 13:19:15.463618 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32653060-ac61-4bb1-aea3-836ee0e777cf-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:16 crc kubenswrapper[5003]: I0104 13:19:16.225576 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2vwnf"
Jan 04 13:19:16 crc kubenswrapper[5003]: I0104 13:19:16.263961 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2vwnf"]
Jan 04 13:19:16 crc kubenswrapper[5003]: I0104 13:19:16.270993 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2vwnf"]
Jan 04 13:19:16 crc kubenswrapper[5003]: I0104 13:19:16.820707 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32653060-ac61-4bb1-aea3-836ee0e777cf" path="/var/lib/kubelet/pods/32653060-ac61-4bb1-aea3-836ee0e777cf/volumes"
Jan 04 13:19:19 crc kubenswrapper[5003]: I0104 13:19:19.759840 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7"
Jan 04 13:19:19 crc kubenswrapper[5003]: I0104 13:19:19.848654 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-8crc2"]
Jan 04 13:19:19 crc kubenswrapper[5003]: I0104 13:19:19.849569 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-699964fbc-8crc2" podUID="f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" containerName="dnsmasq-dns" containerID="cri-o://66ea08ebf9f39246682f7bda7683758aaec8ec1db5ab0c4096ccaff0a4b77f4e" gracePeriod=10
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.272424 5003 generic.go:334] "Generic (PLEG): container finished" podID="f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" containerID="66ea08ebf9f39246682f7bda7683758aaec8ec1db5ab0c4096ccaff0a4b77f4e" exitCode=0
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.272495 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-8crc2" event={"ID":"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a","Type":"ContainerDied","Data":"66ea08ebf9f39246682f7bda7683758aaec8ec1db5ab0c4096ccaff0a4b77f4e"}
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.441196 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.463150 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-dns-svc\") pod \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") "
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.463197 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-config\") pod \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") "
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.463230 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqtzf\" (UniqueName: \"kubernetes.io/projected/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-kube-api-access-jqtzf\") pod \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\" (UID: \"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a\") "
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.477245 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-kube-api-access-jqtzf" (OuterVolumeSpecName: "kube-api-access-jqtzf") pod "f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" (UID: "f98a83d6-cc3b-4cd3-8a74-7d7f951c508a"). InnerVolumeSpecName "kube-api-access-jqtzf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.528089 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-config" (OuterVolumeSpecName: "config") pod "f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" (UID: "f98a83d6-cc3b-4cd3-8a74-7d7f951c508a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.532762 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" (UID: "f98a83d6-cc3b-4cd3-8a74-7d7f951c508a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.565606 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-dns-svc\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.565638 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-config\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:20 crc kubenswrapper[5003]: I0104 13:19:20.565650 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqtzf\" (UniqueName: \"kubernetes.io/projected/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a-kube-api-access-jqtzf\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.197630 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Jan 04 13:19:21 crc kubenswrapper[5003]: E0104 13:19:21.198115 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d729952a-6fc6-43dc-96b7-9a387cf92217" containerName="registry-server"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.198133 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d729952a-6fc6-43dc-96b7-9a387cf92217" containerName="registry-server"
Jan 04 13:19:21 crc kubenswrapper[5003]: E0104 13:19:21.198143 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32653060-ac61-4bb1-aea3-836ee0e777cf" containerName="extract-content"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.198149 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="32653060-ac61-4bb1-aea3-836ee0e777cf" containerName="extract-content"
Jan 04 13:19:21 crc kubenswrapper[5003]: E0104 13:19:21.198158 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32653060-ac61-4bb1-aea3-836ee0e777cf" containerName="registry-server"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.198164 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="32653060-ac61-4bb1-aea3-836ee0e777cf" containerName="registry-server"
Jan 04 13:19:21 crc kubenswrapper[5003]: E0104 13:19:21.198174 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d729952a-6fc6-43dc-96b7-9a387cf92217" containerName="extract-utilities"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.198182 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d729952a-6fc6-43dc-96b7-9a387cf92217" containerName="extract-utilities"
Jan 04 13:19:21 crc kubenswrapper[5003]: E0104 13:19:21.198190 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32653060-ac61-4bb1-aea3-836ee0e777cf" containerName="extract-utilities"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.198195 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="32653060-ac61-4bb1-aea3-836ee0e777cf" containerName="extract-utilities"
Jan 04 13:19:21 crc kubenswrapper[5003]: E0104 13:19:21.198207 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d729952a-6fc6-43dc-96b7-9a387cf92217" containerName="extract-content"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.198216 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d729952a-6fc6-43dc-96b7-9a387cf92217" containerName="extract-content"
Jan 04 13:19:21 crc kubenswrapper[5003]: E0104 13:19:21.198230 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" containerName="dnsmasq-dns"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.198236 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" containerName="dnsmasq-dns"
Jan 04 13:19:21 crc kubenswrapper[5003]: E0104 13:19:21.198253 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" containerName="init"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.198260 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" containerName="init"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.198436 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="32653060-ac61-4bb1-aea3-836ee0e777cf" containerName="registry-server"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.198457 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" containerName="dnsmasq-dns"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.198471 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d729952a-6fc6-43dc-96b7-9a387cf92217" containerName="registry-server"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.199552 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.202862 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.203143 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-s557w"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.203419 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.210737 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.245587 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.279672 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/326adc48-46fc-4f3a-bbba-99e081d4f6ff-scripts\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.279824 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/326adc48-46fc-4f3a-bbba-99e081d4f6ff-config\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.279963 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/326adc48-46fc-4f3a-bbba-99e081d4f6ff-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.280096 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5glh\" (UniqueName: \"kubernetes.io/projected/326adc48-46fc-4f3a-bbba-99e081d4f6ff-kube-api-access-p5glh\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.280196 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/326adc48-46fc-4f3a-bbba-99e081d4f6ff-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.280261 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/326adc48-46fc-4f3a-bbba-99e081d4f6ff-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.280294 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/326adc48-46fc-4f3a-bbba-99e081d4f6ff-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.297431 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699964fbc-8crc2" event={"ID":"f98a83d6-cc3b-4cd3-8a74-7d7f951c508a","Type":"ContainerDied","Data":"444f1f7f729e62a73e13a81d5c8bd6052fe7b19e98060c5757e62c983fff7225"}
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.297507 5003 scope.go:117] "RemoveContainer" containerID="66ea08ebf9f39246682f7bda7683758aaec8ec1db5ab0c4096ccaff0a4b77f4e"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.297704 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699964fbc-8crc2"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.320759 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-8crc2"]
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.325303 5003 scope.go:117] "RemoveContainer" containerID="2c14c22ab7c22e0811e7b05d28682e63637854b78491edd0277a9fafc181d924"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.327302 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-699964fbc-8crc2"]
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.382007 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/326adc48-46fc-4f3a-bbba-99e081d4f6ff-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.382072 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5glh\" (UniqueName: \"kubernetes.io/projected/326adc48-46fc-4f3a-bbba-99e081d4f6ff-kube-api-access-p5glh\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.382142 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/326adc48-46fc-4f3a-bbba-99e081d4f6ff-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.382191 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/326adc48-46fc-4f3a-bbba-99e081d4f6ff-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.382220 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/326adc48-46fc-4f3a-bbba-99e081d4f6ff-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.382257 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/326adc48-46fc-4f3a-bbba-99e081d4f6ff-scripts\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.382283 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/326adc48-46fc-4f3a-bbba-99e081d4f6ff-config\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.383170 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/326adc48-46fc-4f3a-bbba-99e081d4f6ff-config\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.383459 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/326adc48-46fc-4f3a-bbba-99e081d4f6ff-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.384239 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/326adc48-46fc-4f3a-bbba-99e081d4f6ff-scripts\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.391942 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/326adc48-46fc-4f3a-bbba-99e081d4f6ff-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.391961 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/326adc48-46fc-4f3a-bbba-99e081d4f6ff-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.396211 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/326adc48-46fc-4f3a-bbba-99e081d4f6ff-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.403987 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5glh\" (UniqueName: \"kubernetes.io/projected/326adc48-46fc-4f3a-bbba-99e081d4f6ff-kube-api-access-p5glh\") pod \"ovn-northd-0\" (UID: \"326adc48-46fc-4f3a-bbba-99e081d4f6ff\") " pod="openstack/ovn-northd-0"
Jan 04 13:19:21 crc kubenswrapper[5003]: I0104 13:19:21.533054 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Jan 04 13:19:22 crc kubenswrapper[5003]: I0104 13:19:22.024930 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Jan 04 13:19:22 crc kubenswrapper[5003]: W0104 13:19:22.030229 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod326adc48_46fc_4f3a_bbba_99e081d4f6ff.slice/crio-89628b4ecbc90d68f7391381ee7de86aa226a6c6ff288d0fb991ffdb3d580b16 WatchSource:0}: Error finding container 89628b4ecbc90d68f7391381ee7de86aa226a6c6ff288d0fb991ffdb3d580b16: Status 404 returned error can't find the container with id 89628b4ecbc90d68f7391381ee7de86aa226a6c6ff288d0fb991ffdb3d580b16
Jan 04 13:19:22 crc kubenswrapper[5003]: I0104 13:19:22.041854 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:22 crc kubenswrapper[5003]: I0104 13:19:22.110352 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:22 crc kubenswrapper[5003]: I0104 13:19:22.283098 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lkcf7"]
Jan 04 13:19:22 crc kubenswrapper[5003]: I0104 13:19:22.307268 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"326adc48-46fc-4f3a-bbba-99e081d4f6ff","Type":"ContainerStarted","Data":"89628b4ecbc90d68f7391381ee7de86aa226a6c6ff288d0fb991ffdb3d580b16"}
Jan 04 13:19:22 crc kubenswrapper[5003]: I0104 13:19:22.819514 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f98a83d6-cc3b-4cd3-8a74-7d7f951c508a" path="/var/lib/kubelet/pods/f98a83d6-cc3b-4cd3-8a74-7d7f951c508a/volumes"
Jan 04 13:19:23 crc kubenswrapper[5003]: I0104 13:19:23.318423 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"326adc48-46fc-4f3a-bbba-99e081d4f6ff","Type":"ContainerStarted","Data":"f0eedef1cdded23a70ad01ba6fe96a70a7707e92335552aac173d3eba43d9e46"}
Jan 04 13:19:23 crc kubenswrapper[5003]: I0104 13:19:23.318486 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"326adc48-46fc-4f3a-bbba-99e081d4f6ff","Type":"ContainerStarted","Data":"1bde12725844aee0df832a8376d701f5a490636ee6b3b59cdd8e8520d6a06aca"}
Jan 04 13:19:23 crc kubenswrapper[5003]: I0104 13:19:23.318834 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lkcf7" podUID="b025b58a-ca68-4057-b474-e58ca758c727" containerName="registry-server" containerID="cri-o://19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac" gracePeriod=2
Jan 04 13:19:23 crc kubenswrapper[5003]: I0104 13:19:23.361850 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.361832968 podStartE2EDuration="2.361832968s" podCreationTimestamp="2026-01-04 13:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:23.355586232 +0000 UTC m=+5478.828616083" watchObservedRunningTime="2026-01-04 13:19:23.361832968 +0000 UTC m=+5478.834862819"
Jan 04 13:19:23 crc kubenswrapper[5003]: I0104 13:19:23.818117 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:23 crc kubenswrapper[5003]: I0104 13:19:23.906744 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-utilities\") pod \"b025b58a-ca68-4057-b474-e58ca758c727\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") "
Jan 04 13:19:23 crc kubenswrapper[5003]: I0104 13:19:23.906874 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n84k7\" (UniqueName: \"kubernetes.io/projected/b025b58a-ca68-4057-b474-e58ca758c727-kube-api-access-n84k7\") pod \"b025b58a-ca68-4057-b474-e58ca758c727\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") "
Jan 04 13:19:23 crc kubenswrapper[5003]: I0104 13:19:23.907075 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-catalog-content\") pod \"b025b58a-ca68-4057-b474-e58ca758c727\" (UID: \"b025b58a-ca68-4057-b474-e58ca758c727\") "
Jan 04 13:19:23 crc kubenswrapper[5003]: I0104 13:19:23.908060 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-utilities" (OuterVolumeSpecName: "utilities") pod "b025b58a-ca68-4057-b474-e58ca758c727" (UID: "b025b58a-ca68-4057-b474-e58ca758c727"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:19:23 crc kubenswrapper[5003]: I0104 13:19:23.916277 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b025b58a-ca68-4057-b474-e58ca758c727-kube-api-access-n84k7" (OuterVolumeSpecName: "kube-api-access-n84k7") pod "b025b58a-ca68-4057-b474-e58ca758c727" (UID: "b025b58a-ca68-4057-b474-e58ca758c727"). InnerVolumeSpecName "kube-api-access-n84k7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.009975 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.010035 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n84k7\" (UniqueName: \"kubernetes.io/projected/b025b58a-ca68-4057-b474-e58ca758c727-kube-api-access-n84k7\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.044400 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b025b58a-ca68-4057-b474-e58ca758c727" (UID: "b025b58a-ca68-4057-b474-e58ca758c727"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.112628 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b025b58a-ca68-4057-b474-e58ca758c727-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.337086 5003 generic.go:334] "Generic (PLEG): container finished" podID="b025b58a-ca68-4057-b474-e58ca758c727" containerID="19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac" exitCode=0
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.337264 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lkcf7"
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.338200 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkcf7" event={"ID":"b025b58a-ca68-4057-b474-e58ca758c727","Type":"ContainerDied","Data":"19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac"}
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.338229 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lkcf7" event={"ID":"b025b58a-ca68-4057-b474-e58ca758c727","Type":"ContainerDied","Data":"6043afb4160d0975bf7492841e1f730a1f3816cd1d4c4e537839312d3ebf3345"}
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.338245 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.338268 5003 scope.go:117] "RemoveContainer" containerID="19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac"
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.360231 5003 scope.go:117] "RemoveContainer" containerID="b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a"
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.379237 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lkcf7"]
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.400636 5003 scope.go:117] "RemoveContainer" containerID="0e34f10b6a614f8be35059aa13204147ff2b89620d9a74cc026712a2c7dc78a0"
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.404470 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lkcf7"]
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.427983 5003 scope.go:117] "RemoveContainer" containerID="19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac"
Jan 04 13:19:24 crc kubenswrapper[5003]: E0104 13:19:24.428522 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac\": container with ID starting with 19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac not found: ID does not exist" containerID="19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac"
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.428555 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac"} err="failed to get container status \"19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac\": rpc error: code = NotFound desc = could not find container \"19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac\": container with ID starting with 19f72ce4f7a317cd2259851f9b745318f1d57be5e8038328ecd01f17db7089ac not found: ID does not exist"
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.428580 5003 scope.go:117] "RemoveContainer" containerID="b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a"
Jan 04 13:19:24 crc kubenswrapper[5003]: E0104 13:19:24.429048 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a\": container with ID starting with b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a not found: ID does not exist" containerID="b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a"
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.429070 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a"} err="failed to get container status \"b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a\": rpc error: code = NotFound desc = could not find container \"b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a\": container with ID starting with b7e0b80b2489d7d16ce73d2a2b6db6018dd22032514860f249ec4a8128f0ee6a not found: ID does not exist"
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.429086 5003 scope.go:117] "RemoveContainer" containerID="0e34f10b6a614f8be35059aa13204147ff2b89620d9a74cc026712a2c7dc78a0"
Jan 04 13:19:24 crc kubenswrapper[5003]: E0104 13:19:24.429407 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e34f10b6a614f8be35059aa13204147ff2b89620d9a74cc026712a2c7dc78a0\": container with ID starting with 0e34f10b6a614f8be35059aa13204147ff2b89620d9a74cc026712a2c7dc78a0 not found: ID does not exist" containerID="0e34f10b6a614f8be35059aa13204147ff2b89620d9a74cc026712a2c7dc78a0"
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.429432 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e34f10b6a614f8be35059aa13204147ff2b89620d9a74cc026712a2c7dc78a0"} err="failed to get container status \"0e34f10b6a614f8be35059aa13204147ff2b89620d9a74cc026712a2c7dc78a0\": rpc error: code = NotFound desc = could not find container \"0e34f10b6a614f8be35059aa13204147ff2b89620d9a74cc026712a2c7dc78a0\": container with ID starting with 0e34f10b6a614f8be35059aa13204147ff2b89620d9a74cc026712a2c7dc78a0 not found: ID does not exist"
Jan 04 13:19:24 crc kubenswrapper[5003]: I0104 13:19:24.822047 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b025b58a-ca68-4057-b474-e58ca758c727" path="/var/lib/kubelet/pods/b025b58a-ca68-4057-b474-e58ca758c727/volumes"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.668231 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-pt9b2"]
Jan 04 13:19:27 crc kubenswrapper[5003]: E0104 13:19:27.669367 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b025b58a-ca68-4057-b474-e58ca758c727" containerName="extract-utilities"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.669385 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b025b58a-ca68-4057-b474-e58ca758c727" containerName="extract-utilities"
Jan 04 13:19:27 crc kubenswrapper[5003]: E0104 13:19:27.669408 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b025b58a-ca68-4057-b474-e58ca758c727" containerName="extract-content"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.669415 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b025b58a-ca68-4057-b474-e58ca758c727" containerName="extract-content"
Jan 04 13:19:27 crc kubenswrapper[5003]: E0104 13:19:27.669442 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b025b58a-ca68-4057-b474-e58ca758c727" containerName="registry-server"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.669449 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b025b58a-ca68-4057-b474-e58ca758c727" containerName="registry-server"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.669611 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b025b58a-ca68-4057-b474-e58ca758c727" containerName="registry-server"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.670328 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-pt9b2"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.684102 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-pt9b2"]
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.789168 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9610d2b5-f6c7-4144-a02c-94b8b418f540-operator-scripts\") pod \"keystone-db-create-pt9b2\" (UID: \"9610d2b5-f6c7-4144-a02c-94b8b418f540\") " pod="openstack/keystone-db-create-pt9b2"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.789458 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6sqp\" (UniqueName: \"kubernetes.io/projected/9610d2b5-f6c7-4144-a02c-94b8b418f540-kube-api-access-c6sqp\") pod \"keystone-db-create-pt9b2\" (UID: \"9610d2b5-f6c7-4144-a02c-94b8b418f540\") " pod="openstack/keystone-db-create-pt9b2"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.798320 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-80e0-account-create-update-xqp87"]
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.800763 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-80e0-account-create-update-xqp87"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.808838 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-80e0-account-create-update-xqp87"]
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.822489 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.890974 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26b75c4c-5b23-426d-9880-d2d9a8daebd3-operator-scripts\") pod \"keystone-80e0-account-create-update-xqp87\" (UID: \"26b75c4c-5b23-426d-9880-d2d9a8daebd3\") " pod="openstack/keystone-80e0-account-create-update-xqp87"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.891062 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcx6c\" (UniqueName: \"kubernetes.io/projected/26b75c4c-5b23-426d-9880-d2d9a8daebd3-kube-api-access-jcx6c\") pod \"keystone-80e0-account-create-update-xqp87\" (UID: \"26b75c4c-5b23-426d-9880-d2d9a8daebd3\") " pod="openstack/keystone-80e0-account-create-update-xqp87"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.891173 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9610d2b5-f6c7-4144-a02c-94b8b418f540-operator-scripts\") pod \"keystone-db-create-pt9b2\" (UID: \"9610d2b5-f6c7-4144-a02c-94b8b418f540\") " pod="openstack/keystone-db-create-pt9b2"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.891206 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6sqp\" (UniqueName: \"kubernetes.io/projected/9610d2b5-f6c7-4144-a02c-94b8b418f540-kube-api-access-c6sqp\") pod \"keystone-db-create-pt9b2\" (UID: \"9610d2b5-f6c7-4144-a02c-94b8b418f540\") " pod="openstack/keystone-db-create-pt9b2"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.892987 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9610d2b5-f6c7-4144-a02c-94b8b418f540-operator-scripts\") pod \"keystone-db-create-pt9b2\" (UID: \"9610d2b5-f6c7-4144-a02c-94b8b418f540\") " pod="openstack/keystone-db-create-pt9b2"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.921780 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6sqp\" (UniqueName: \"kubernetes.io/projected/9610d2b5-f6c7-4144-a02c-94b8b418f540-kube-api-access-c6sqp\") pod \"keystone-db-create-pt9b2\" (UID: \"9610d2b5-f6c7-4144-a02c-94b8b418f540\") " pod="openstack/keystone-db-create-pt9b2"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.992923 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26b75c4c-5b23-426d-9880-d2d9a8daebd3-operator-scripts\") pod \"keystone-80e0-account-create-update-xqp87\" (UID: \"26b75c4c-5b23-426d-9880-d2d9a8daebd3\") " pod="openstack/keystone-80e0-account-create-update-xqp87"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.993265 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcx6c\" (UniqueName: \"kubernetes.io/projected/26b75c4c-5b23-426d-9880-d2d9a8daebd3-kube-api-access-jcx6c\") pod \"keystone-80e0-account-create-update-xqp87\" (UID: \"26b75c4c-5b23-426d-9880-d2d9a8daebd3\") " pod="openstack/keystone-80e0-account-create-update-xqp87"
Jan 04 13:19:27 crc kubenswrapper[5003]: I0104 13:19:27.994037 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26b75c4c-5b23-426d-9880-d2d9a8daebd3-operator-scripts\") pod \"keystone-80e0-account-create-update-xqp87\" (UID: \"26b75c4c-5b23-426d-9880-d2d9a8daebd3\") " pod="openstack/keystone-80e0-account-create-update-xqp87"
Jan 04 13:19:28 crc kubenswrapper[5003]: I0104 13:19:28.013038 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcx6c\" (UniqueName: \"kubernetes.io/projected/26b75c4c-5b23-426d-9880-d2d9a8daebd3-kube-api-access-jcx6c\") pod \"keystone-80e0-account-create-update-xqp87\" (UID: \"26b75c4c-5b23-426d-9880-d2d9a8daebd3\") " pod="openstack/keystone-80e0-account-create-update-xqp87"
Jan 04 13:19:28 crc kubenswrapper[5003]: I0104 13:19:28.024534 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-pt9b2"
Jan 04 13:19:28 crc kubenswrapper[5003]: I0104 13:19:28.165223 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-80e0-account-create-update-xqp87"
Jan 04 13:19:28 crc kubenswrapper[5003]: I0104 13:19:28.469298 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-pt9b2"]
Jan 04 13:19:28 crc kubenswrapper[5003]: W0104 13:19:28.623994 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26b75c4c_5b23_426d_9880_d2d9a8daebd3.slice/crio-1476ffea0adcde8265ab82368f153b36fb7d7340dcf6f351e4374c7bb0aaa1c6 WatchSource:0}: Error finding container 1476ffea0adcde8265ab82368f153b36fb7d7340dcf6f351e4374c7bb0aaa1c6: Status 404 returned error can't find the container with id 1476ffea0adcde8265ab82368f153b36fb7d7340dcf6f351e4374c7bb0aaa1c6
Jan 04 13:19:28 crc kubenswrapper[5003]: I0104 13:19:28.629362 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-80e0-account-create-update-xqp87"]
Jan 04 13:19:29 crc kubenswrapper[5003]: I0104 13:19:29.387053 5003 generic.go:334] "Generic (PLEG): container finished" podID="26b75c4c-5b23-426d-9880-d2d9a8daebd3" containerID="73ef38f7b451e0361a06d4b95a7921df33b1aa0769f0c41f275ef2328e599f69" exitCode=0
Jan 04 13:19:29 crc kubenswrapper[5003]: I0104 13:19:29.387127 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-80e0-account-create-update-xqp87" event={"ID":"26b75c4c-5b23-426d-9880-d2d9a8daebd3","Type":"ContainerDied","Data":"73ef38f7b451e0361a06d4b95a7921df33b1aa0769f0c41f275ef2328e599f69"}
Jan 04 13:19:29 crc kubenswrapper[5003]: I0104 13:19:29.387459 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-80e0-account-create-update-xqp87" event={"ID":"26b75c4c-5b23-426d-9880-d2d9a8daebd3","Type":"ContainerStarted","Data":"1476ffea0adcde8265ab82368f153b36fb7d7340dcf6f351e4374c7bb0aaa1c6"}
Jan 04 13:19:29 crc kubenswrapper[5003]: I0104 13:19:29.388806 5003 generic.go:334] "Generic (PLEG): container finished" podID="9610d2b5-f6c7-4144-a02c-94b8b418f540" containerID="853fffb292aabb26d25e0a179ca851239d0eecd35c7bcaafec51e3df740a6a65" exitCode=0
Jan 04 13:19:29 crc kubenswrapper[5003]: I0104 13:19:29.388854 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-pt9b2" event={"ID":"9610d2b5-f6c7-4144-a02c-94b8b418f540","Type":"ContainerDied","Data":"853fffb292aabb26d25e0a179ca851239d0eecd35c7bcaafec51e3df740a6a65"}
Jan 04 13:19:29 crc kubenswrapper[5003]: I0104 13:19:29.388884 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-pt9b2" event={"ID":"9610d2b5-f6c7-4144-a02c-94b8b418f540","Type":"ContainerStarted","Data":"9ba87145c5c5355725dbebe5c06aa57a3bb1198798204f2783b8229406969880"}
Jan 04 13:19:30 crc kubenswrapper[5003]: I0104 13:19:30.893162 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-80e0-account-create-update-xqp87"
Jan 04 13:19:30 crc kubenswrapper[5003]: I0104 13:19:30.898292 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-pt9b2"
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.059256 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6sqp\" (UniqueName: \"kubernetes.io/projected/9610d2b5-f6c7-4144-a02c-94b8b418f540-kube-api-access-c6sqp\") pod \"9610d2b5-f6c7-4144-a02c-94b8b418f540\" (UID: \"9610d2b5-f6c7-4144-a02c-94b8b418f540\") "
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.059387 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26b75c4c-5b23-426d-9880-d2d9a8daebd3-operator-scripts\") pod \"26b75c4c-5b23-426d-9880-d2d9a8daebd3\" (UID: \"26b75c4c-5b23-426d-9880-d2d9a8daebd3\") "
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.059438 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcx6c\" (UniqueName: \"kubernetes.io/projected/26b75c4c-5b23-426d-9880-d2d9a8daebd3-kube-api-access-jcx6c\") pod \"26b75c4c-5b23-426d-9880-d2d9a8daebd3\" (UID: \"26b75c4c-5b23-426d-9880-d2d9a8daebd3\") "
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.059502 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9610d2b5-f6c7-4144-a02c-94b8b418f540-operator-scripts\") pod \"9610d2b5-f6c7-4144-a02c-94b8b418f540\" (UID: \"9610d2b5-f6c7-4144-a02c-94b8b418f540\") "
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.060108 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26b75c4c-5b23-426d-9880-d2d9a8daebd3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "26b75c4c-5b23-426d-9880-d2d9a8daebd3" (UID: "26b75c4c-5b23-426d-9880-d2d9a8daebd3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.060245 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9610d2b5-f6c7-4144-a02c-94b8b418f540-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9610d2b5-f6c7-4144-a02c-94b8b418f540" (UID: "9610d2b5-f6c7-4144-a02c-94b8b418f540"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.065437 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9610d2b5-f6c7-4144-a02c-94b8b418f540-kube-api-access-c6sqp" (OuterVolumeSpecName: "kube-api-access-c6sqp") pod "9610d2b5-f6c7-4144-a02c-94b8b418f540" (UID: "9610d2b5-f6c7-4144-a02c-94b8b418f540"). InnerVolumeSpecName "kube-api-access-c6sqp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.065878 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26b75c4c-5b23-426d-9880-d2d9a8daebd3-kube-api-access-jcx6c" (OuterVolumeSpecName: "kube-api-access-jcx6c") pod "26b75c4c-5b23-426d-9880-d2d9a8daebd3" (UID: "26b75c4c-5b23-426d-9880-d2d9a8daebd3"). InnerVolumeSpecName "kube-api-access-jcx6c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.160966 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6sqp\" (UniqueName: \"kubernetes.io/projected/9610d2b5-f6c7-4144-a02c-94b8b418f540-kube-api-access-c6sqp\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.161001 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26b75c4c-5b23-426d-9880-d2d9a8daebd3-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.161024 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcx6c\" (UniqueName: \"kubernetes.io/projected/26b75c4c-5b23-426d-9880-d2d9a8daebd3-kube-api-access-jcx6c\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.161033 5003 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9610d2b5-f6c7-4144-a02c-94b8b418f540-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.404990 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-80e0-account-create-update-xqp87" event={"ID":"26b75c4c-5b23-426d-9880-d2d9a8daebd3","Type":"ContainerDied","Data":"1476ffea0adcde8265ab82368f153b36fb7d7340dcf6f351e4374c7bb0aaa1c6"}
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.405069 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1476ffea0adcde8265ab82368f153b36fb7d7340dcf6f351e4374c7bb0aaa1c6"
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.405038 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-80e0-account-create-update-xqp87"
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.407121 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-pt9b2" event={"ID":"9610d2b5-f6c7-4144-a02c-94b8b418f540","Type":"ContainerDied","Data":"9ba87145c5c5355725dbebe5c06aa57a3bb1198798204f2783b8229406969880"}
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.407148 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ba87145c5c5355725dbebe5c06aa57a3bb1198798204f2783b8229406969880"
Jan 04 13:19:31 crc kubenswrapper[5003]: I0104 13:19:31.407199 5003 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/keystone-db-create-pt9b2" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.403893 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-622cf"] Jan 04 13:19:33 crc kubenswrapper[5003]: E0104 13:19:33.404657 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9610d2b5-f6c7-4144-a02c-94b8b418f540" containerName="mariadb-database-create" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.404681 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="9610d2b5-f6c7-4144-a02c-94b8b418f540" containerName="mariadb-database-create" Jan 04 13:19:33 crc kubenswrapper[5003]: E0104 13:19:33.404708 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26b75c4c-5b23-426d-9880-d2d9a8daebd3" containerName="mariadb-account-create-update" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.404720 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="26b75c4c-5b23-426d-9880-d2d9a8daebd3" containerName="mariadb-account-create-update" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.404940 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="26b75c4c-5b23-426d-9880-d2d9a8daebd3" containerName="mariadb-account-create-update" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.404980 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="9610d2b5-f6c7-4144-a02c-94b8b418f540" containerName="mariadb-database-create" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.405665 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.409096 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.409318 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-htkmt" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.409552 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.414403 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.415810 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-622cf"] Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.530341 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-combined-ca-bundle\") pod \"keystone-db-sync-622cf\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.530958 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-config-data\") pod \"keystone-db-sync-622cf\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.531144 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97pct\" (UniqueName: 
\"kubernetes.io/projected/05dffc83-fce8-4e1c-8a0a-9120efe4b704-kube-api-access-97pct\") pod \"keystone-db-sync-622cf\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.632459 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-combined-ca-bundle\") pod \"keystone-db-sync-622cf\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.632613 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-config-data\") pod \"keystone-db-sync-622cf\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.632678 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97pct\" (UniqueName: \"kubernetes.io/projected/05dffc83-fce8-4e1c-8a0a-9120efe4b704-kube-api-access-97pct\") pod \"keystone-db-sync-622cf\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.643354 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-combined-ca-bundle\") pod \"keystone-db-sync-622cf\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.653927 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-config-data\") pod \"keystone-db-sync-622cf\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.664607 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97pct\" (UniqueName: \"kubernetes.io/projected/05dffc83-fce8-4e1c-8a0a-9120efe4b704-kube-api-access-97pct\") pod \"keystone-db-sync-622cf\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:33 crc kubenswrapper[5003]: I0104 13:19:33.737954 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:34 crc kubenswrapper[5003]: I0104 13:19:34.310197 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-622cf"] Jan 04 13:19:34 crc kubenswrapper[5003]: W0104 13:19:34.310435 5003 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05dffc83_fce8_4e1c_8a0a_9120efe4b704.slice/crio-2ef945bc9f234a49f1ff61c980f49f49194d5e3e5865542eb48594db3fdd50ea WatchSource:0}: Error finding container 2ef945bc9f234a49f1ff61c980f49f49194d5e3e5865542eb48594db3fdd50ea: Status 404 returned error can't find the container with id 2ef945bc9f234a49f1ff61c980f49f49194d5e3e5865542eb48594db3fdd50ea Jan 04 13:19:34 crc kubenswrapper[5003]: I0104 13:19:34.432949 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-622cf" event={"ID":"05dffc83-fce8-4e1c-8a0a-9120efe4b704","Type":"ContainerStarted","Data":"2ef945bc9f234a49f1ff61c980f49f49194d5e3e5865542eb48594db3fdd50ea"} Jan 04 13:19:35 crc kubenswrapper[5003]: I0104 13:19:35.443914 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-622cf" event={"ID":"05dffc83-fce8-4e1c-8a0a-9120efe4b704","Type":"ContainerStarted","Data":"7436c743fbfb0afd11294e951a578985f3be67ebcdd4e2e8d2f5aa311ca1735f"} Jan 04 13:19:35 crc kubenswrapper[5003]: I0104 13:19:35.470524 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-622cf" podStartSLOduration=2.47050808 podStartE2EDuration="2.47050808s" podCreationTimestamp="2026-01-04 13:19:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:35.465512307 +0000 UTC m=+5490.938542148" watchObservedRunningTime="2026-01-04 13:19:35.47050808 +0000 UTC m=+5490.943537921" Jan 04 13:19:36 crc kubenswrapper[5003]: I0104 13:19:36.456179 5003 generic.go:334] "Generic (PLEG): container finished" podID="05dffc83-fce8-4e1c-8a0a-9120efe4b704" containerID="7436c743fbfb0afd11294e951a578985f3be67ebcdd4e2e8d2f5aa311ca1735f" exitCode=0 Jan 04 13:19:36 crc kubenswrapper[5003]: I0104 13:19:36.456231 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-622cf" event={"ID":"05dffc83-fce8-4e1c-8a0a-9120efe4b704","Type":"ContainerDied","Data":"7436c743fbfb0afd11294e951a578985f3be67ebcdd4e2e8d2f5aa311ca1735f"} Jan 04 13:19:36 crc kubenswrapper[5003]: I0104 13:19:36.588143 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Jan 04 13:19:37 crc kubenswrapper[5003]: I0104 13:19:37.805078 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:37 crc kubenswrapper[5003]: I0104 13:19:37.933775 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97pct\" (UniqueName: \"kubernetes.io/projected/05dffc83-fce8-4e1c-8a0a-9120efe4b704-kube-api-access-97pct\") pod \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " Jan 04 13:19:37 crc kubenswrapper[5003]: I0104 13:19:37.933829 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-config-data\") pod \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " Jan 04 13:19:37 crc kubenswrapper[5003]: I0104 13:19:37.933871 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-combined-ca-bundle\") pod \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\" (UID: \"05dffc83-fce8-4e1c-8a0a-9120efe4b704\") " Jan 04 13:19:37 crc kubenswrapper[5003]: I0104 13:19:37.939445 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05dffc83-fce8-4e1c-8a0a-9120efe4b704-kube-api-access-97pct" (OuterVolumeSpecName: "kube-api-access-97pct") pod "05dffc83-fce8-4e1c-8a0a-9120efe4b704" (UID: "05dffc83-fce8-4e1c-8a0a-9120efe4b704"). InnerVolumeSpecName "kube-api-access-97pct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:19:37 crc kubenswrapper[5003]: I0104 13:19:37.979202 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05dffc83-fce8-4e1c-8a0a-9120efe4b704" (UID: "05dffc83-fce8-4e1c-8a0a-9120efe4b704"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.012213 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-config-data" (OuterVolumeSpecName: "config-data") pod "05dffc83-fce8-4e1c-8a0a-9120efe4b704" (UID: "05dffc83-fce8-4e1c-8a0a-9120efe4b704"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.037685 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97pct\" (UniqueName: \"kubernetes.io/projected/05dffc83-fce8-4e1c-8a0a-9120efe4b704-kube-api-access-97pct\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.037718 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.037729 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05dffc83-fce8-4e1c-8a0a-9120efe4b704-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.222381 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-dls8m"] Jan 04 13:19:38 crc kubenswrapper[5003]: E0104 13:19:38.222709 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05dffc83-fce8-4e1c-8a0a-9120efe4b704" containerName="keystone-db-sync" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.222725 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="05dffc83-fce8-4e1c-8a0a-9120efe4b704" containerName="keystone-db-sync" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.222888 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="05dffc83-fce8-4e1c-8a0a-9120efe4b704" containerName="keystone-db-sync" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.223426 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.226006 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.246171 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-scripts\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.246223 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-fernet-keys\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.246253 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-combined-ca-bundle\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.246278 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5crh6\" (UniqueName: \"kubernetes.io/projected/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-kube-api-access-5crh6\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" 
Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.246335 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-credential-keys\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.246385 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-config-data\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.250524 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dls8m"] Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.281203 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d997bd4b5-nmr2n"] Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.282580 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.305938 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d997bd4b5-nmr2n"] Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.346788 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-combined-ca-bundle\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.347478 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5crh6\" (UniqueName: \"kubernetes.io/projected/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-kube-api-access-5crh6\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.347520 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-dns-svc\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.347591 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-credential-keys\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.347624 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-config-data\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.347710 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-config\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.347814 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-ovsdbserver-sb\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.347874 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-ovsdbserver-nb\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.348062 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-scripts\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.348171 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m96qz\" (UniqueName: \"kubernetes.io/projected/301f653f-93ac-4940-9505-32414c777153-kube-api-access-m96qz\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.348207 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-fernet-keys\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.351213 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-combined-ca-bundle\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.353415 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-scripts\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.353954 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-config-data\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.354548 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-fernet-keys\") pod 
\"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.360951 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-credential-keys\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.369032 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5crh6\" (UniqueName: \"kubernetes.io/projected/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-kube-api-access-5crh6\") pod \"keystone-bootstrap-dls8m\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.450392 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-ovsdbserver-sb\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.450438 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-ovsdbserver-nb\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.450508 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m96qz\" (UniqueName: \"kubernetes.io/projected/301f653f-93ac-4940-9505-32414c777153-kube-api-access-m96qz\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.450548 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-dns-svc\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.450604 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-config\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.451357 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-ovsdbserver-sb\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.451466 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-config\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" 
Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.451466 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-ovsdbserver-nb\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.451658 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/301f653f-93ac-4940-9505-32414c777153-dns-svc\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.471766 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m96qz\" (UniqueName: \"kubernetes.io/projected/301f653f-93ac-4940-9505-32414c777153-kube-api-access-m96qz\") pod \"dnsmasq-dns-6d997bd4b5-nmr2n\" (UID: \"301f653f-93ac-4940-9505-32414c777153\") " pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.473484 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-622cf" event={"ID":"05dffc83-fce8-4e1c-8a0a-9120efe4b704","Type":"ContainerDied","Data":"2ef945bc9f234a49f1ff61c980f49f49194d5e3e5865542eb48594db3fdd50ea"} Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.473533 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ef945bc9f234a49f1ff61c980f49f49194d5e3e5865542eb48594db3fdd50ea" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.473596 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-622cf" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.537628 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:38 crc kubenswrapper[5003]: I0104 13:19:38.603287 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.065493 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dls8m"] Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.153317 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d997bd4b5-nmr2n"] Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.419045 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.419142 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.419217 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.420469 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"97829298803abd5773d5e3270e0265b933495a48a989f0b5b421d823db2a3293"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.420536 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://97829298803abd5773d5e3270e0265b933495a48a989f0b5b421d823db2a3293" gracePeriod=600 Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.483069 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dls8m" event={"ID":"aafb1b3a-241d-41ba-8b4e-c54806bc2c43","Type":"ContainerStarted","Data":"62ccd8fa89d3b453df82e0bc1cdfe478a889d349f3b21b45518caa1b7bb53e80"} Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.483484 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dls8m" event={"ID":"aafb1b3a-241d-41ba-8b4e-c54806bc2c43","Type":"ContainerStarted","Data":"dcf6a7f71b46c7d4f559d930e8aee22e0818b4450e214ade80cdbdf9ebb24656"} Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.485200 5003 generic.go:334] "Generic (PLEG): container finished" podID="301f653f-93ac-4940-9505-32414c777153" containerID="29e6d40ed480b16a265a0c7f36cf185e9bf176978bc731671f56235f9794fc29" exitCode=0 Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.485238 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" event={"ID":"301f653f-93ac-4940-9505-32414c777153","Type":"ContainerDied","Data":"29e6d40ed480b16a265a0c7f36cf185e9bf176978bc731671f56235f9794fc29"} Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.485254 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" 
event={"ID":"301f653f-93ac-4940-9505-32414c777153","Type":"ContainerStarted","Data":"2825341bbd21029d70cf94806cc0510219b852921f1a7b11edee93ed4f16188b"} Jan 04 13:19:39 crc kubenswrapper[5003]: I0104 13:19:39.517189 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-dls8m" podStartSLOduration=1.5171701610000001 podStartE2EDuration="1.517170161s" podCreationTimestamp="2026-01-04 13:19:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:39.512726404 +0000 UTC m=+5494.985756245" watchObservedRunningTime="2026-01-04 13:19:39.517170161 +0000 UTC m=+5494.990199992" Jan 04 13:19:40 crc kubenswrapper[5003]: I0104 13:19:40.500899 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="97829298803abd5773d5e3270e0265b933495a48a989f0b5b421d823db2a3293" exitCode=0 Jan 04 13:19:40 crc kubenswrapper[5003]: I0104 13:19:40.500974 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"97829298803abd5773d5e3270e0265b933495a48a989f0b5b421d823db2a3293"} Jan 04 13:19:40 crc kubenswrapper[5003]: I0104 13:19:40.502024 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4"} Jan 04 13:19:40 crc kubenswrapper[5003]: I0104 13:19:40.502071 5003 scope.go:117] "RemoveContainer" containerID="ab89a3503b868c615a10b0dddcf9602282c1df798b3f18e301d8cf21d5ab3516" Jan 04 13:19:40 crc kubenswrapper[5003]: I0104 13:19:40.507008 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" event={"ID":"301f653f-93ac-4940-9505-32414c777153","Type":"ContainerStarted","Data":"37c6deb47e928a11bc920b1e2accfea8098bd6431bd1acbbd31688f18f730c1a"} Jan 04 13:19:40 crc kubenswrapper[5003]: I0104 13:19:40.507193 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:40 crc kubenswrapper[5003]: I0104 13:19:40.562708 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" podStartSLOduration=2.562680595 podStartE2EDuration="2.562680595s" podCreationTimestamp="2026-01-04 13:19:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:40.557630763 +0000 UTC m=+5496.030660634" watchObservedRunningTime="2026-01-04 13:19:40.562680595 +0000 UTC m=+5496.035710436" Jan 04 13:19:44 crc kubenswrapper[5003]: I0104 13:19:44.558430 5003 generic.go:334] "Generic (PLEG): container finished" podID="aafb1b3a-241d-41ba-8b4e-c54806bc2c43" containerID="62ccd8fa89d3b453df82e0bc1cdfe478a889d349f3b21b45518caa1b7bb53e80" exitCode=0 Jan 04 13:19:44 crc kubenswrapper[5003]: I0104 13:19:44.558538 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dls8m" event={"ID":"aafb1b3a-241d-41ba-8b4e-c54806bc2c43","Type":"ContainerDied","Data":"62ccd8fa89d3b453df82e0bc1cdfe478a889d349f3b21b45518caa1b7bb53e80"} Jan 04 13:19:45 crc kubenswrapper[5003]: I0104 13:19:45.963192 5003 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.130443 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-credential-keys\") pod \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.130650 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-scripts\") pod \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.130766 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-config-data\") pod \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.130859 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5crh6\" (UniqueName: \"kubernetes.io/projected/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-kube-api-access-5crh6\") pod \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.131132 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-combined-ca-bundle\") pod \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.131173 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-fernet-keys\") pod \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\" (UID: \"aafb1b3a-241d-41ba-8b4e-c54806bc2c43\") " Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.139854 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "aafb1b3a-241d-41ba-8b4e-c54806bc2c43" (UID: "aafb1b3a-241d-41ba-8b4e-c54806bc2c43"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.140510 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "aafb1b3a-241d-41ba-8b4e-c54806bc2c43" (UID: "aafb1b3a-241d-41ba-8b4e-c54806bc2c43"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.141084 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-kube-api-access-5crh6" (OuterVolumeSpecName: "kube-api-access-5crh6") pod "aafb1b3a-241d-41ba-8b4e-c54806bc2c43" (UID: "aafb1b3a-241d-41ba-8b4e-c54806bc2c43"). InnerVolumeSpecName "kube-api-access-5crh6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.145831 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-scripts" (OuterVolumeSpecName: "scripts") pod "aafb1b3a-241d-41ba-8b4e-c54806bc2c43" (UID: "aafb1b3a-241d-41ba-8b4e-c54806bc2c43"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.170937 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-config-data" (OuterVolumeSpecName: "config-data") pod "aafb1b3a-241d-41ba-8b4e-c54806bc2c43" (UID: "aafb1b3a-241d-41ba-8b4e-c54806bc2c43"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.181355 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aafb1b3a-241d-41ba-8b4e-c54806bc2c43" (UID: "aafb1b3a-241d-41ba-8b4e-c54806bc2c43"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.234118 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5crh6\" (UniqueName: \"kubernetes.io/projected/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-kube-api-access-5crh6\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.234195 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.234216 5003 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.234236 5003 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.234253 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.234273 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aafb1b3a-241d-41ba-8b4e-c54806bc2c43-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.578716 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dls8m" event={"ID":"aafb1b3a-241d-41ba-8b4e-c54806bc2c43","Type":"ContainerDied","Data":"dcf6a7f71b46c7d4f559d930e8aee22e0818b4450e214ade80cdbdf9ebb24656"} Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.578770 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcf6a7f71b46c7d4f559d930e8aee22e0818b4450e214ade80cdbdf9ebb24656" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.578843 5003 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dls8m" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.685883 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-dls8m"] Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.693106 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-dls8m"] Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.780385 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-n76kt"] Jan 04 13:19:46 crc kubenswrapper[5003]: E0104 13:19:46.781497 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aafb1b3a-241d-41ba-8b4e-c54806bc2c43" containerName="keystone-bootstrap" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.781519 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="aafb1b3a-241d-41ba-8b4e-c54806bc2c43" containerName="keystone-bootstrap" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.781999 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="aafb1b3a-241d-41ba-8b4e-c54806bc2c43" containerName="keystone-bootstrap" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.782992 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.791080 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.791392 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.791573 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.791859 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.792213 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-htkmt" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.842214 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aafb1b3a-241d-41ba-8b4e-c54806bc2c43" path="/var/lib/kubelet/pods/aafb1b3a-241d-41ba-8b4e-c54806bc2c43/volumes" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.842959 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-n76kt"] Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.953744 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-combined-ca-bundle\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.954378 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flssx\" (UniqueName: \"kubernetes.io/projected/940c2752-d359-4e0f-bcca-cfb3689750d4-kube-api-access-flssx\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.954617 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-config-data\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.954815 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-scripts\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.955472 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-credential-keys\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:46 crc kubenswrapper[5003]: I0104 13:19:46.955713 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-fernet-keys\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.057791 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-config-data\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.057885 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-scripts\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.059104 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-credential-keys\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.059268 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-fernet-keys\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.059341 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-combined-ca-bundle\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.059445 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flssx\" (UniqueName: 
\"kubernetes.io/projected/940c2752-d359-4e0f-bcca-cfb3689750d4-kube-api-access-flssx\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.067078 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-scripts\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.067428 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-credential-keys\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.068342 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-combined-ca-bundle\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.069004 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-config-data\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.069849 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-fernet-keys\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.080190 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flssx\" (UniqueName: \"kubernetes.io/projected/940c2752-d359-4e0f-bcca-cfb3689750d4-kube-api-access-flssx\") pod \"keystone-bootstrap-n76kt\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.111207 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:47 crc kubenswrapper[5003]: I0104 13:19:47.658116 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-n76kt"] Jan 04 13:19:48 crc kubenswrapper[5003]: I0104 13:19:48.606293 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d997bd4b5-nmr2n" Jan 04 13:19:48 crc kubenswrapper[5003]: I0104 13:19:48.607121 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-n76kt" event={"ID":"940c2752-d359-4e0f-bcca-cfb3689750d4","Type":"ContainerStarted","Data":"6c15ebb154fe37b5d414d9c2d3d6739aa5eb0728597ef302d4a5e3e1c0fa7ed5"} Jan 04 13:19:48 crc kubenswrapper[5003]: I0104 13:19:48.607178 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-n76kt" event={"ID":"940c2752-d359-4e0f-bcca-cfb3689750d4","Type":"ContainerStarted","Data":"2275c8ce996d5446770352fad6e276b8631831155aad44e79c498bb4372f0ff2"} Jan 04 13:19:48 crc kubenswrapper[5003]: I0104 13:19:48.688181 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-n76kt" podStartSLOduration=2.688149015 podStartE2EDuration="2.688149015s" podCreationTimestamp="2026-01-04 13:19:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:48.657377875 +0000 UTC m=+5504.130407726" watchObservedRunningTime="2026-01-04 13:19:48.688149015 +0000 UTC m=+5504.161178886" Jan 04 13:19:48 crc kubenswrapper[5003]: I0104 13:19:48.723804 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-659c7d5767-9tcl7"] Jan 04 13:19:48 crc kubenswrapper[5003]: I0104 13:19:48.724080 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" podUID="d821cd2d-e303-46e8-ae8f-fd92653ec465" containerName="dnsmasq-dns" containerID="cri-o://83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0" gracePeriod=10 Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.243880 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.411567 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-config\") pod \"d821cd2d-e303-46e8-ae8f-fd92653ec465\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.412026 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-sb\") pod \"d821cd2d-e303-46e8-ae8f-fd92653ec465\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.412128 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-dns-svc\") pod \"d821cd2d-e303-46e8-ae8f-fd92653ec465\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.412218 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-nb\") pod \"d821cd2d-e303-46e8-ae8f-fd92653ec465\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.412294 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwzxp\" (UniqueName: \"kubernetes.io/projected/d821cd2d-e303-46e8-ae8f-fd92653ec465-kube-api-access-kwzxp\") pod \"d821cd2d-e303-46e8-ae8f-fd92653ec465\" (UID: \"d821cd2d-e303-46e8-ae8f-fd92653ec465\") " Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.418675 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d821cd2d-e303-46e8-ae8f-fd92653ec465-kube-api-access-kwzxp" (OuterVolumeSpecName: "kube-api-access-kwzxp") pod "d821cd2d-e303-46e8-ae8f-fd92653ec465" (UID: "d821cd2d-e303-46e8-ae8f-fd92653ec465"). InnerVolumeSpecName "kube-api-access-kwzxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.495575 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d821cd2d-e303-46e8-ae8f-fd92653ec465" (UID: "d821cd2d-e303-46e8-ae8f-fd92653ec465"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.496919 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d821cd2d-e303-46e8-ae8f-fd92653ec465" (UID: "d821cd2d-e303-46e8-ae8f-fd92653ec465"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.499483 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-config" (OuterVolumeSpecName: "config") pod "d821cd2d-e303-46e8-ae8f-fd92653ec465" (UID: "d821cd2d-e303-46e8-ae8f-fd92653ec465"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.514651 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.514699 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwzxp\" (UniqueName: \"kubernetes.io/projected/d821cd2d-e303-46e8-ae8f-fd92653ec465-kube-api-access-kwzxp\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.514721 5003 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-config\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.514734 5003 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.521049 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d821cd2d-e303-46e8-ae8f-fd92653ec465" (UID: "d821cd2d-e303-46e8-ae8f-fd92653ec465"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.616743 5003 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d821cd2d-e303-46e8-ae8f-fd92653ec465-dns-svc\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.618581 5003 generic.go:334] "Generic (PLEG): container finished" podID="d821cd2d-e303-46e8-ae8f-fd92653ec465" containerID="83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0" exitCode=0 Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.619292 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" event={"ID":"d821cd2d-e303-46e8-ae8f-fd92653ec465","Type":"ContainerDied","Data":"83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0"} Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.619328 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" event={"ID":"d821cd2d-e303-46e8-ae8f-fd92653ec465","Type":"ContainerDied","Data":"6504ffb3bf31d0b3b364c4fe2eb181590cbfa3db5e5044f7882257b4af7fc857"} Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.619350 5003 scope.go:117] "RemoveContainer" containerID="83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.619780 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-659c7d5767-9tcl7" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.646264 5003 scope.go:117] "RemoveContainer" containerID="67e308eb51235fed3bd521028cc28bda96693cc9eaffff14d725482032ffaf4b" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.661214 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-659c7d5767-9tcl7"] Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.667873 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-659c7d5767-9tcl7"] Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.698563 5003 scope.go:117] "RemoveContainer" containerID="83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0" Jan 04 13:19:49 crc kubenswrapper[5003]: E0104 13:19:49.699324 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0\": container with ID starting with 83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0 not found: ID does not exist" containerID="83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.699371 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0"} err="failed to get container status \"83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0\": rpc error: code = NotFound desc = could not find container \"83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0\": container with ID starting with 83c58d976650597f269ca85c443fd99d82576d7466c8f9ac3a32938fa21f4ed0 not found: ID does not exist" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.699407 5003 scope.go:117] "RemoveContainer" containerID="67e308eb51235fed3bd521028cc28bda96693cc9eaffff14d725482032ffaf4b" Jan 04 13:19:49 crc kubenswrapper[5003]: E0104 13:19:49.700159 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67e308eb51235fed3bd521028cc28bda96693cc9eaffff14d725482032ffaf4b\": container with ID starting with 67e308eb51235fed3bd521028cc28bda96693cc9eaffff14d725482032ffaf4b not found: ID does not exist" containerID="67e308eb51235fed3bd521028cc28bda96693cc9eaffff14d725482032ffaf4b" Jan 04 13:19:49 crc kubenswrapper[5003]: I0104 13:19:49.700218 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67e308eb51235fed3bd521028cc28bda96693cc9eaffff14d725482032ffaf4b"} err="failed to get container status \"67e308eb51235fed3bd521028cc28bda96693cc9eaffff14d725482032ffaf4b\": rpc error: code = NotFound desc = could not find container \"67e308eb51235fed3bd521028cc28bda96693cc9eaffff14d725482032ffaf4b\": container with ID starting with 67e308eb51235fed3bd521028cc28bda96693cc9eaffff14d725482032ffaf4b not found: ID does not exist" Jan 04 13:19:50 crc kubenswrapper[5003]: I0104 13:19:50.822711 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d821cd2d-e303-46e8-ae8f-fd92653ec465" path="/var/lib/kubelet/pods/d821cd2d-e303-46e8-ae8f-fd92653ec465/volumes" Jan 04 13:19:51 crc kubenswrapper[5003]: I0104 13:19:51.650118 5003 generic.go:334] "Generic (PLEG): container finished" podID="940c2752-d359-4e0f-bcca-cfb3689750d4" containerID="6c15ebb154fe37b5d414d9c2d3d6739aa5eb0728597ef302d4a5e3e1c0fa7ed5" 
exitCode=0 Jan 04 13:19:51 crc kubenswrapper[5003]: I0104 13:19:51.650222 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-n76kt" event={"ID":"940c2752-d359-4e0f-bcca-cfb3689750d4","Type":"ContainerDied","Data":"6c15ebb154fe37b5d414d9c2d3d6739aa5eb0728597ef302d4a5e3e1c0fa7ed5"} Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.017062 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.195497 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-credential-keys\") pod \"940c2752-d359-4e0f-bcca-cfb3689750d4\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.196387 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-config-data\") pod \"940c2752-d359-4e0f-bcca-cfb3689750d4\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.196509 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-scripts\") pod \"940c2752-d359-4e0f-bcca-cfb3689750d4\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.196567 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-combined-ca-bundle\") pod \"940c2752-d359-4e0f-bcca-cfb3689750d4\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.196645 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-fernet-keys\") pod \"940c2752-d359-4e0f-bcca-cfb3689750d4\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.196803 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flssx\" (UniqueName: \"kubernetes.io/projected/940c2752-d359-4e0f-bcca-cfb3689750d4-kube-api-access-flssx\") pod \"940c2752-d359-4e0f-bcca-cfb3689750d4\" (UID: \"940c2752-d359-4e0f-bcca-cfb3689750d4\") " Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.204243 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "940c2752-d359-4e0f-bcca-cfb3689750d4" (UID: "940c2752-d359-4e0f-bcca-cfb3689750d4"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.205502 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/940c2752-d359-4e0f-bcca-cfb3689750d4-kube-api-access-flssx" (OuterVolumeSpecName: "kube-api-access-flssx") pod "940c2752-d359-4e0f-bcca-cfb3689750d4" (UID: "940c2752-d359-4e0f-bcca-cfb3689750d4"). InnerVolumeSpecName "kube-api-access-flssx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.205485 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "940c2752-d359-4e0f-bcca-cfb3689750d4" (UID: "940c2752-d359-4e0f-bcca-cfb3689750d4"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.206572 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-scripts" (OuterVolumeSpecName: "scripts") pod "940c2752-d359-4e0f-bcca-cfb3689750d4" (UID: "940c2752-d359-4e0f-bcca-cfb3689750d4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.227619 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-config-data" (OuterVolumeSpecName: "config-data") pod "940c2752-d359-4e0f-bcca-cfb3689750d4" (UID: "940c2752-d359-4e0f-bcca-cfb3689750d4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.249264 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "940c2752-d359-4e0f-bcca-cfb3689750d4" (UID: "940c2752-d359-4e0f-bcca-cfb3689750d4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.299679 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flssx\" (UniqueName: \"kubernetes.io/projected/940c2752-d359-4e0f-bcca-cfb3689750d4-kube-api-access-flssx\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.299724 5003 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.299738 5003 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-config-data\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.299749 5003 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-scripts\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.299763 5003 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.299774 5003 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/940c2752-d359-4e0f-bcca-cfb3689750d4-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.675535 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-n76kt" 
event={"ID":"940c2752-d359-4e0f-bcca-cfb3689750d4","Type":"ContainerDied","Data":"2275c8ce996d5446770352fad6e276b8631831155aad44e79c498bb4372f0ff2"} Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.675608 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2275c8ce996d5446770352fad6e276b8631831155aad44e79c498bb4372f0ff2" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.675707 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-n76kt" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.812524 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-66dd6c68-jl7x7"] Jan 04 13:19:53 crc kubenswrapper[5003]: E0104 13:19:53.812906 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="940c2752-d359-4e0f-bcca-cfb3689750d4" containerName="keystone-bootstrap" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.812926 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="940c2752-d359-4e0f-bcca-cfb3689750d4" containerName="keystone-bootstrap" Jan 04 13:19:53 crc kubenswrapper[5003]: E0104 13:19:53.812964 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d821cd2d-e303-46e8-ae8f-fd92653ec465" containerName="dnsmasq-dns" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.812972 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d821cd2d-e303-46e8-ae8f-fd92653ec465" containerName="dnsmasq-dns" Jan 04 13:19:53 crc kubenswrapper[5003]: E0104 13:19:53.812984 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d821cd2d-e303-46e8-ae8f-fd92653ec465" containerName="init" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.812990 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="d821cd2d-e303-46e8-ae8f-fd92653ec465" containerName="init" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.813169 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="940c2752-d359-4e0f-bcca-cfb3689750d4" containerName="keystone-bootstrap" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.813200 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="d821cd2d-e303-46e8-ae8f-fd92653ec465" containerName="dnsmasq-dns" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.813823 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.818495 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-domains" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.818799 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.818911 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.819584 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.819744 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-htkmt" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.821264 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.828094 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.836548 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-66dd6c68-jl7x7"] Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.915867 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cc62\" (UniqueName: \"kubernetes.io/projected/ab1bcdeb-2e22-414d-9214-858e867a6617-kube-api-access-4cc62\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.915934 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-scripts\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.915971 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"keystone-domains\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-keystone-domains\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.916000 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-public-tls-certs\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.916047 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-fernet-keys\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.916340 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-config-data\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.916497 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-credential-keys\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.916591 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-internal-tls-certs\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:53 crc kubenswrapper[5003]: I0104 13:19:53.916746 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-combined-ca-bundle\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.018401 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-config-data\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.018471 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-credential-keys\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.018522 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-internal-tls-certs\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.018567 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-combined-ca-bundle\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.018625 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cc62\" (UniqueName: \"kubernetes.io/projected/ab1bcdeb-2e22-414d-9214-858e867a6617-kube-api-access-4cc62\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.018647 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-scripts\") pod 
\"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.018666 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"keystone-domains\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-keystone-domains\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.018687 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-public-tls-certs\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.018724 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-fernet-keys\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.023438 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-scripts\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.024433 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-internal-tls-certs\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.024571 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-credential-keys\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.030378 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-fernet-keys\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.030580 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"keystone-domains\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-keystone-domains\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.030855 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-public-tls-certs\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.034000 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-combined-ca-bundle\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.034357 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab1bcdeb-2e22-414d-9214-858e867a6617-config-data\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.049791 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cc62\" (UniqueName: \"kubernetes.io/projected/ab1bcdeb-2e22-414d-9214-858e867a6617-kube-api-access-4cc62\") pod \"keystone-66dd6c68-jl7x7\" (UID: \"ab1bcdeb-2e22-414d-9214-858e867a6617\") " pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.131194 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:54 crc kubenswrapper[5003]: I0104 13:19:54.691502 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-66dd6c68-jl7x7"] Jan 04 13:19:55 crc kubenswrapper[5003]: I0104 13:19:55.706661 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-66dd6c68-jl7x7" event={"ID":"ab1bcdeb-2e22-414d-9214-858e867a6617","Type":"ContainerStarted","Data":"5908e28bbd23f61286c8699565c1a7b5758d61e25dc7a9e689accede8782453e"} Jan 04 13:19:55 crc kubenswrapper[5003]: I0104 13:19:55.707595 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-66dd6c68-jl7x7" event={"ID":"ab1bcdeb-2e22-414d-9214-858e867a6617","Type":"ContainerStarted","Data":"96f0e71de801adf281ad23a44cca74a9049f9a1efdc06f691738341c558f3c03"} Jan 04 13:19:55 crc kubenswrapper[5003]: I0104 13:19:55.709131 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:19:55 crc kubenswrapper[5003]: I0104 13:19:55.764975 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-66dd6c68-jl7x7" podStartSLOduration=2.764950396 podStartE2EDuration="2.764950396s" podCreationTimestamp="2026-01-04 13:19:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:19:55.745300379 +0000 UTC m=+5511.218330230" watchObservedRunningTime="2026-01-04 13:19:55.764950396 +0000 UTC m=+5511.237980247" Jan 04 13:20:14 crc kubenswrapper[5003]: I0104 13:20:14.894176 5003 scope.go:117] "RemoveContainer" containerID="357247eac594aa18425aa1c27519f655642d0ca34632d7f5734691ee0393d77c" Jan 04 13:20:25 crc kubenswrapper[5003]: I0104 13:20:25.748922 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-66dd6c68-jl7x7" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.729966 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.734371 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.737700 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-d2lkj" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.738132 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.738352 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.757999 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.860961 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd1398e5-5eb1-48b1-b460-343c25250504-combined-ca-bundle\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.861358 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bd1398e5-5eb1-48b1-b460-343c25250504-openstack-config-secret\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.861534 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bd1398e5-5eb1-48b1-b460-343c25250504-openstack-config\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.861675 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q66ln\" (UniqueName: \"kubernetes.io/projected/bd1398e5-5eb1-48b1-b460-343c25250504-kube-api-access-q66ln\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.963965 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bd1398e5-5eb1-48b1-b460-343c25250504-openstack-config-secret\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.964760 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bd1398e5-5eb1-48b1-b460-343c25250504-openstack-config\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.964818 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q66ln\" (UniqueName: \"kubernetes.io/projected/bd1398e5-5eb1-48b1-b460-343c25250504-kube-api-access-q66ln\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.964943 5003 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd1398e5-5eb1-48b1-b460-343c25250504-combined-ca-bundle\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.966555 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bd1398e5-5eb1-48b1-b460-343c25250504-openstack-config\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.973841 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bd1398e5-5eb1-48b1-b460-343c25250504-openstack-config-secret\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.973915 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd1398e5-5eb1-48b1-b460-343c25250504-combined-ca-bundle\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:27 crc kubenswrapper[5003]: I0104 13:20:27.986862 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q66ln\" (UniqueName: \"kubernetes.io/projected/bd1398e5-5eb1-48b1-b460-343c25250504-kube-api-access-q66ln\") pod \"openstackclient\" (UID: \"bd1398e5-5eb1-48b1-b460-343c25250504\") " pod="openstack/openstackclient" Jan 04 13:20:28 crc kubenswrapper[5003]: I0104 13:20:28.082694 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Jan 04 13:20:28 crc kubenswrapper[5003]: I0104 13:20:28.362669 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Jan 04 13:20:29 crc kubenswrapper[5003]: I0104 13:20:29.080410 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"bd1398e5-5eb1-48b1-b460-343c25250504","Type":"ContainerStarted","Data":"733fa1663825bda4a174e13de901978534e55ebdfa370842c9cf37a803242b6d"} Jan 04 13:20:29 crc kubenswrapper[5003]: I0104 13:20:29.081123 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"bd1398e5-5eb1-48b1-b460-343c25250504","Type":"ContainerStarted","Data":"4a9d95214636582db9ce02e207b68926163c601ecbb7e348a166fe44a0f9d05c"} Jan 04 13:20:29 crc kubenswrapper[5003]: I0104 13:20:29.103195 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.103168984 podStartE2EDuration="2.103168984s" podCreationTimestamp="2026-01-04 13:20:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:20:29.097649488 +0000 UTC m=+5544.570679340" watchObservedRunningTime="2026-01-04 13:20:29.103168984 +0000 UTC m=+5544.576198845" Jan 04 13:21:15 crc kubenswrapper[5003]: I0104 13:21:15.061413 5003 scope.go:117] "RemoveContainer" containerID="56e2a7be88045c4d67c8c688eb3089019ac2be40ccc6c9613295aabfcec7cb5a" Jan 04 13:21:15 crc kubenswrapper[5003]: I0104 13:21:15.103441 5003 scope.go:117] "RemoveContainer" containerID="86c230fd02a050651095f0b4aa053db4da22570da041b613bd27957496e1f9d9" Jan 04 13:21:15 crc kubenswrapper[5003]: I0104 13:21:15.173993 5003 scope.go:117] "RemoveContainer" containerID="cc02aa7d89f2a7c5cbbfb381eee16b5be37d3467f6a0e97fe6472a7e50e384e4" Jan 04 13:21:39 crc kubenswrapper[5003]: I0104 13:21:39.419994 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:21:39 crc kubenswrapper[5003]: I0104 13:21:39.421123 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:22:09 crc kubenswrapper[5003]: I0104 13:22:09.418132 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:22:09 crc kubenswrapper[5003]: I0104 13:22:09.418910 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:22:32 crc kubenswrapper[5003]: I0104 13:22:32.086042 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/root-account-create-update-rh47m"] Jan 04 13:22:32 crc kubenswrapper[5003]: I0104 13:22:32.095222 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-rh47m"] Jan 04 13:22:32 crc kubenswrapper[5003]: I0104 13:22:32.824960 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a464e30-2e05-4592-940e-5335bb92b086" path="/var/lib/kubelet/pods/9a464e30-2e05-4592-940e-5335bb92b086/volumes" Jan 04 13:22:39 crc kubenswrapper[5003]: I0104 13:22:39.419010 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 04 13:22:39 crc kubenswrapper[5003]: I0104 13:22:39.420924 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 04 13:22:39 crc kubenswrapper[5003]: I0104 13:22:39.420979 5003 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" Jan 04 13:22:39 crc kubenswrapper[5003]: I0104 13:22:39.421680 5003 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4"} pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 04 13:22:39 crc kubenswrapper[5003]: I0104 13:22:39.421746 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" containerID="cri-o://7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" gracePeriod=600 Jan 04 13:22:39 crc kubenswrapper[5003]: E0104 13:22:39.565786 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:22:39 crc kubenswrapper[5003]: I0104 13:22:39.657699 5003 generic.go:334] "Generic (PLEG): container finished" podID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" exitCode=0 Jan 04 13:22:39 crc kubenswrapper[5003]: I0104 13:22:39.657808 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerDied","Data":"7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4"} Jan 04 13:22:39 crc kubenswrapper[5003]: I0104 13:22:39.657913 5003 scope.go:117] "RemoveContainer" containerID="97829298803abd5773d5e3270e0265b933495a48a989f0b5b421d823db2a3293" Jan 04 13:22:39 crc kubenswrapper[5003]: 
I0104 13:22:39.659143 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:22:39 crc kubenswrapper[5003]: E0104 13:22:39.659687 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:22:53 crc kubenswrapper[5003]: I0104 13:22:53.806174 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:22:53 crc kubenswrapper[5003]: E0104 13:22:53.807143 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:23:04 crc kubenswrapper[5003]: I0104 13:23:04.819299 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:23:04 crc kubenswrapper[5003]: E0104 13:23:04.821313 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.061083 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-w9lnh/must-gather-w2x2z"] Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.062755 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w9lnh/must-gather-w2x2z" Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.064635 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-w9lnh"/"default-dockercfg-tn92n" Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.065520 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-w9lnh"/"kube-root-ca.crt" Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.066202 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-w9lnh"/"openshift-service-ca.crt" Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.074943 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-w9lnh/must-gather-w2x2z"] Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.182097 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-must-gather-output\") pod \"must-gather-w2x2z\" (UID: \"b3f04202-e84a-44a9-a571-a9cd51a8a1bc\") " pod="openshift-must-gather-w9lnh/must-gather-w2x2z" Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.182337 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjm76\" (UniqueName: \"kubernetes.io/projected/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-kube-api-access-fjm76\") pod \"must-gather-w2x2z\" (UID: \"b3f04202-e84a-44a9-a571-a9cd51a8a1bc\") " pod="openshift-must-gather-w9lnh/must-gather-w2x2z" Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.283945 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjm76\" (UniqueName: \"kubernetes.io/projected/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-kube-api-access-fjm76\") pod \"must-gather-w2x2z\" (UID: \"b3f04202-e84a-44a9-a571-a9cd51a8a1bc\") " pod="openshift-must-gather-w9lnh/must-gather-w2x2z" Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.284085 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-must-gather-output\") pod \"must-gather-w2x2z\" (UID: \"b3f04202-e84a-44a9-a571-a9cd51a8a1bc\") " pod="openshift-must-gather-w9lnh/must-gather-w2x2z" Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.284552 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-must-gather-output\") pod \"must-gather-w2x2z\" (UID: \"b3f04202-e84a-44a9-a571-a9cd51a8a1bc\") " pod="openshift-must-gather-w9lnh/must-gather-w2x2z" Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.315082 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjm76\" (UniqueName: \"kubernetes.io/projected/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-kube-api-access-fjm76\") pod \"must-gather-w2x2z\" (UID: \"b3f04202-e84a-44a9-a571-a9cd51a8a1bc\") " pod="openshift-must-gather-w9lnh/must-gather-w2x2z" Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.380959 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w9lnh/must-gather-w2x2z"
Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.891849 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-w9lnh/must-gather-w2x2z"]
Jan 04 13:23:06 crc kubenswrapper[5003]: I0104 13:23:06.896640 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 04 13:23:07 crc kubenswrapper[5003]: I0104 13:23:07.012057 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w9lnh/must-gather-w2x2z" event={"ID":"b3f04202-e84a-44a9-a571-a9cd51a8a1bc","Type":"ContainerStarted","Data":"e3038e92b0eaea8a2615a897a925495c88a3f96f2abe4cb56d3a60572af82fe5"}
Jan 04 13:23:15 crc kubenswrapper[5003]: I0104 13:23:15.087640 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w9lnh/must-gather-w2x2z" event={"ID":"b3f04202-e84a-44a9-a571-a9cd51a8a1bc","Type":"ContainerStarted","Data":"310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee"}
Jan 04 13:23:15 crc kubenswrapper[5003]: I0104 13:23:15.088656 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w9lnh/must-gather-w2x2z" event={"ID":"b3f04202-e84a-44a9-a571-a9cd51a8a1bc","Type":"ContainerStarted","Data":"220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff"}
Jan 04 13:23:15 crc kubenswrapper[5003]: I0104 13:23:15.108536 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-w9lnh/must-gather-w2x2z" podStartSLOduration=1.542303789 podStartE2EDuration="9.10851287s" podCreationTimestamp="2026-01-04 13:23:06 +0000 UTC" firstStartedPulling="2026-01-04 13:23:06.896359822 +0000 UTC m=+5702.369389663" lastFinishedPulling="2026-01-04 13:23:14.462568903 +0000 UTC m=+5709.935598744" observedRunningTime="2026-01-04 13:23:15.103533049 +0000 UTC m=+5710.576562890" watchObservedRunningTime="2026-01-04 13:23:15.10851287 +0000 UTC m=+5710.581542711"
Jan 04 13:23:15 crc kubenswrapper[5003]: I0104 13:23:15.305279 5003 scope.go:117] "RemoveContainer" containerID="37479772f97e43e4755e6d9eb0ebf34db29c771973543d3c5c34a49e93643e56"
Jan 04 13:23:17 crc kubenswrapper[5003]: I0104 13:23:17.826596 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-w9lnh/crc-debug-qkg44"]
Jan 04 13:23:17 crc kubenswrapper[5003]: I0104 13:23:17.828627 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w9lnh/crc-debug-qkg44"
Jan 04 13:23:17 crc kubenswrapper[5003]: I0104 13:23:17.867275 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xdgn\" (UniqueName: \"kubernetes.io/projected/81993cb0-0a88-4f6e-9d88-f728b9b56141-kube-api-access-4xdgn\") pod \"crc-debug-qkg44\" (UID: \"81993cb0-0a88-4f6e-9d88-f728b9b56141\") " pod="openshift-must-gather-w9lnh/crc-debug-qkg44"
Jan 04 13:23:17 crc kubenswrapper[5003]: I0104 13:23:17.867392 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/81993cb0-0a88-4f6e-9d88-f728b9b56141-host\") pod \"crc-debug-qkg44\" (UID: \"81993cb0-0a88-4f6e-9d88-f728b9b56141\") " pod="openshift-must-gather-w9lnh/crc-debug-qkg44"
Jan 04 13:23:17 crc kubenswrapper[5003]: I0104 13:23:17.969642 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xdgn\" (UniqueName: \"kubernetes.io/projected/81993cb0-0a88-4f6e-9d88-f728b9b56141-kube-api-access-4xdgn\") pod \"crc-debug-qkg44\" (UID: \"81993cb0-0a88-4f6e-9d88-f728b9b56141\") " pod="openshift-must-gather-w9lnh/crc-debug-qkg44"
Jan 04 13:23:17 crc kubenswrapper[5003]: I0104 13:23:17.969749 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/81993cb0-0a88-4f6e-9d88-f728b9b56141-host\") pod \"crc-debug-qkg44\" (UID: \"81993cb0-0a88-4f6e-9d88-f728b9b56141\") " pod="openshift-must-gather-w9lnh/crc-debug-qkg44"
Jan 04 13:23:17 crc kubenswrapper[5003]: I0104 13:23:17.969943 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/81993cb0-0a88-4f6e-9d88-f728b9b56141-host\") pod \"crc-debug-qkg44\" (UID: \"81993cb0-0a88-4f6e-9d88-f728b9b56141\") " pod="openshift-must-gather-w9lnh/crc-debug-qkg44"
Jan 04 13:23:18 crc kubenswrapper[5003]: I0104 13:23:18.005597 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xdgn\" (UniqueName: \"kubernetes.io/projected/81993cb0-0a88-4f6e-9d88-f728b9b56141-kube-api-access-4xdgn\") pod \"crc-debug-qkg44\" (UID: \"81993cb0-0a88-4f6e-9d88-f728b9b56141\") " pod="openshift-must-gather-w9lnh/crc-debug-qkg44"
Jan 04 13:23:18 crc kubenswrapper[5003]: I0104 13:23:18.143978 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w9lnh/crc-debug-qkg44"
Jan 04 13:23:19 crc kubenswrapper[5003]: I0104 13:23:19.130970 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w9lnh/crc-debug-qkg44" event={"ID":"81993cb0-0a88-4f6e-9d88-f728b9b56141","Type":"ContainerStarted","Data":"6b874d0fcd96e590e42a50d09e38e7a23f4f049d01fb18c47a627269dc9c73cc"}
Jan 04 13:23:19 crc kubenswrapper[5003]: I0104 13:23:19.806966 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4"
Jan 04 13:23:19 crc kubenswrapper[5003]: E0104 13:23:19.807434 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 13:23:31 crc kubenswrapper[5003]: I0104 13:23:31.255769 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w9lnh/crc-debug-qkg44" event={"ID":"81993cb0-0a88-4f6e-9d88-f728b9b56141","Type":"ContainerStarted","Data":"4948f7f30f847eb6e7092bbc4ec4e14e6c849f55cec819f0ffc81fc964bace2e"}
Jan 04 13:23:31 crc kubenswrapper[5003]: I0104 13:23:31.273240 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-w9lnh/crc-debug-qkg44" podStartSLOduration=1.982297452 podStartE2EDuration="14.273217411s" podCreationTimestamp="2026-01-04 13:23:17 +0000 UTC" firstStartedPulling="2026-01-04 13:23:18.192186393 +0000 UTC m=+5713.665216234" lastFinishedPulling="2026-01-04 13:23:30.483106342 +0000 UTC m=+5725.956136193" observedRunningTime="2026-01-04 13:23:31.268610929 +0000 UTC m=+5726.741640770" watchObservedRunningTime="2026-01-04 13:23:31.273217411 +0000 UTC m=+5726.746247242"
Jan 04 13:23:32 crc kubenswrapper[5003]: I0104 13:23:32.808493 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4"
Jan 04 13:23:32 crc kubenswrapper[5003]: E0104 13:23:32.809819 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 13:23:43 crc kubenswrapper[5003]: I0104 13:23:43.807611 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4"
Jan 04 13:23:43 crc kubenswrapper[5003]: E0104 13:23:43.810214 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a"
Jan 04 13:23:46 crc kubenswrapper[5003]: I0104 13:23:46.742097 5003 generic.go:334] "Generic (PLEG): container finished" podID="81993cb0-0a88-4f6e-9d88-f728b9b56141" containerID="4948f7f30f847eb6e7092bbc4ec4e14e6c849f55cec819f0ffc81fc964bace2e" exitCode=0
containerID="4948f7f30f847eb6e7092bbc4ec4e14e6c849f55cec819f0ffc81fc964bace2e" exitCode=0 Jan 04 13:23:46 crc kubenswrapper[5003]: I0104 13:23:46.742182 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w9lnh/crc-debug-qkg44" event={"ID":"81993cb0-0a88-4f6e-9d88-f728b9b56141","Type":"ContainerDied","Data":"4948f7f30f847eb6e7092bbc4ec4e14e6c849f55cec819f0ffc81fc964bace2e"} Jan 04 13:23:47 crc kubenswrapper[5003]: I0104 13:23:47.889541 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w9lnh/crc-debug-qkg44" Jan 04 13:23:47 crc kubenswrapper[5003]: I0104 13:23:47.924742 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-w9lnh/crc-debug-qkg44"] Jan 04 13:23:47 crc kubenswrapper[5003]: I0104 13:23:47.933986 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-w9lnh/crc-debug-qkg44"] Jan 04 13:23:48 crc kubenswrapper[5003]: I0104 13:23:48.008750 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/81993cb0-0a88-4f6e-9d88-f728b9b56141-host\") pod \"81993cb0-0a88-4f6e-9d88-f728b9b56141\" (UID: \"81993cb0-0a88-4f6e-9d88-f728b9b56141\") " Jan 04 13:23:48 crc kubenswrapper[5003]: I0104 13:23:48.008917 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xdgn\" (UniqueName: \"kubernetes.io/projected/81993cb0-0a88-4f6e-9d88-f728b9b56141-kube-api-access-4xdgn\") pod \"81993cb0-0a88-4f6e-9d88-f728b9b56141\" (UID: \"81993cb0-0a88-4f6e-9d88-f728b9b56141\") " Jan 04 13:23:48 crc kubenswrapper[5003]: I0104 13:23:48.008918 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/81993cb0-0a88-4f6e-9d88-f728b9b56141-host" (OuterVolumeSpecName: "host") pod "81993cb0-0a88-4f6e-9d88-f728b9b56141" (UID: "81993cb0-0a88-4f6e-9d88-f728b9b56141"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 13:23:48 crc kubenswrapper[5003]: I0104 13:23:48.009326 5003 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/81993cb0-0a88-4f6e-9d88-f728b9b56141-host\") on node \"crc\" DevicePath \"\"" Jan 04 13:23:48 crc kubenswrapper[5003]: I0104 13:23:48.018169 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81993cb0-0a88-4f6e-9d88-f728b9b56141-kube-api-access-4xdgn" (OuterVolumeSpecName: "kube-api-access-4xdgn") pod "81993cb0-0a88-4f6e-9d88-f728b9b56141" (UID: "81993cb0-0a88-4f6e-9d88-f728b9b56141"). InnerVolumeSpecName "kube-api-access-4xdgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:23:48 crc kubenswrapper[5003]: I0104 13:23:48.111801 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xdgn\" (UniqueName: \"kubernetes.io/projected/81993cb0-0a88-4f6e-9d88-f728b9b56141-kube-api-access-4xdgn\") on node \"crc\" DevicePath \"\"" Jan 04 13:23:48 crc kubenswrapper[5003]: I0104 13:23:48.766539 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b874d0fcd96e590e42a50d09e38e7a23f4f049d01fb18c47a627269dc9c73cc" Jan 04 13:23:48 crc kubenswrapper[5003]: I0104 13:23:48.766669 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w9lnh/crc-debug-qkg44" Jan 04 13:23:48 crc kubenswrapper[5003]: I0104 13:23:48.817341 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81993cb0-0a88-4f6e-9d88-f728b9b56141" path="/var/lib/kubelet/pods/81993cb0-0a88-4f6e-9d88-f728b9b56141/volumes" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.225420 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-w9lnh/crc-debug-44s4w"] Jan 04 13:23:49 crc kubenswrapper[5003]: E0104 13:23:49.225824 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81993cb0-0a88-4f6e-9d88-f728b9b56141" containerName="container-00" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.225837 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="81993cb0-0a88-4f6e-9d88-f728b9b56141" containerName="container-00" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.225984 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="81993cb0-0a88-4f6e-9d88-f728b9b56141" containerName="container-00" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.226596 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w9lnh/crc-debug-44s4w" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.336282 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rd42\" (UniqueName: \"kubernetes.io/projected/a90177e2-ac68-417b-aefb-4e8b09a83401-kube-api-access-9rd42\") pod \"crc-debug-44s4w\" (UID: \"a90177e2-ac68-417b-aefb-4e8b09a83401\") " pod="openshift-must-gather-w9lnh/crc-debug-44s4w" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.336378 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a90177e2-ac68-417b-aefb-4e8b09a83401-host\") pod \"crc-debug-44s4w\" (UID: \"a90177e2-ac68-417b-aefb-4e8b09a83401\") " pod="openshift-must-gather-w9lnh/crc-debug-44s4w" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.438805 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rd42\" (UniqueName: \"kubernetes.io/projected/a90177e2-ac68-417b-aefb-4e8b09a83401-kube-api-access-9rd42\") pod \"crc-debug-44s4w\" (UID: \"a90177e2-ac68-417b-aefb-4e8b09a83401\") " pod="openshift-must-gather-w9lnh/crc-debug-44s4w" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.438870 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a90177e2-ac68-417b-aefb-4e8b09a83401-host\") pod \"crc-debug-44s4w\" (UID: \"a90177e2-ac68-417b-aefb-4e8b09a83401\") " pod="openshift-must-gather-w9lnh/crc-debug-44s4w" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.439042 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a90177e2-ac68-417b-aefb-4e8b09a83401-host\") pod \"crc-debug-44s4w\" (UID: \"a90177e2-ac68-417b-aefb-4e8b09a83401\") " pod="openshift-must-gather-w9lnh/crc-debug-44s4w" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.460515 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rd42\" (UniqueName: \"kubernetes.io/projected/a90177e2-ac68-417b-aefb-4e8b09a83401-kube-api-access-9rd42\") pod \"crc-debug-44s4w\" (UID: \"a90177e2-ac68-417b-aefb-4e8b09a83401\") " 
pod="openshift-must-gather-w9lnh/crc-debug-44s4w" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.545866 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w9lnh/crc-debug-44s4w" Jan 04 13:23:49 crc kubenswrapper[5003]: I0104 13:23:49.779331 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w9lnh/crc-debug-44s4w" event={"ID":"a90177e2-ac68-417b-aefb-4e8b09a83401","Type":"ContainerStarted","Data":"6d33d896a6fc61f5c57f5129d605b51394421fa3bbafd2b30595ebbd4322fb6b"} Jan 04 13:23:50 crc kubenswrapper[5003]: I0104 13:23:50.791944 5003 generic.go:334] "Generic (PLEG): container finished" podID="a90177e2-ac68-417b-aefb-4e8b09a83401" containerID="52daaebfd1aeb24d03be8c481077b1f70dbe604c8f0e2843ea742a7756c05d81" exitCode=1 Jan 04 13:23:50 crc kubenswrapper[5003]: I0104 13:23:50.792106 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w9lnh/crc-debug-44s4w" event={"ID":"a90177e2-ac68-417b-aefb-4e8b09a83401","Type":"ContainerDied","Data":"52daaebfd1aeb24d03be8c481077b1f70dbe604c8f0e2843ea742a7756c05d81"} Jan 04 13:23:50 crc kubenswrapper[5003]: I0104 13:23:50.840520 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-w9lnh/crc-debug-44s4w"] Jan 04 13:23:50 crc kubenswrapper[5003]: I0104 13:23:50.846800 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-w9lnh/crc-debug-44s4w"] Jan 04 13:23:51 crc kubenswrapper[5003]: I0104 13:23:51.903295 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w9lnh/crc-debug-44s4w" Jan 04 13:23:51 crc kubenswrapper[5003]: I0104 13:23:51.996867 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rd42\" (UniqueName: \"kubernetes.io/projected/a90177e2-ac68-417b-aefb-4e8b09a83401-kube-api-access-9rd42\") pod \"a90177e2-ac68-417b-aefb-4e8b09a83401\" (UID: \"a90177e2-ac68-417b-aefb-4e8b09a83401\") " Jan 04 13:23:51 crc kubenswrapper[5003]: I0104 13:23:51.997090 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a90177e2-ac68-417b-aefb-4e8b09a83401-host\") pod \"a90177e2-ac68-417b-aefb-4e8b09a83401\" (UID: \"a90177e2-ac68-417b-aefb-4e8b09a83401\") " Jan 04 13:23:51 crc kubenswrapper[5003]: I0104 13:23:51.997347 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a90177e2-ac68-417b-aefb-4e8b09a83401-host" (OuterVolumeSpecName: "host") pod "a90177e2-ac68-417b-aefb-4e8b09a83401" (UID: "a90177e2-ac68-417b-aefb-4e8b09a83401"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 04 13:23:51 crc kubenswrapper[5003]: I0104 13:23:51.997867 5003 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a90177e2-ac68-417b-aefb-4e8b09a83401-host\") on node \"crc\" DevicePath \"\"" Jan 04 13:23:52 crc kubenswrapper[5003]: I0104 13:23:52.013528 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a90177e2-ac68-417b-aefb-4e8b09a83401-kube-api-access-9rd42" (OuterVolumeSpecName: "kube-api-access-9rd42") pod "a90177e2-ac68-417b-aefb-4e8b09a83401" (UID: "a90177e2-ac68-417b-aefb-4e8b09a83401"). InnerVolumeSpecName "kube-api-access-9rd42". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:23:52 crc kubenswrapper[5003]: I0104 13:23:52.099745 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rd42\" (UniqueName: \"kubernetes.io/projected/a90177e2-ac68-417b-aefb-4e8b09a83401-kube-api-access-9rd42\") on node \"crc\" DevicePath \"\"" Jan 04 13:23:52 crc kubenswrapper[5003]: I0104 13:23:52.814435 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w9lnh/crc-debug-44s4w" Jan 04 13:23:52 crc kubenswrapper[5003]: I0104 13:23:52.817965 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a90177e2-ac68-417b-aefb-4e8b09a83401" path="/var/lib/kubelet/pods/a90177e2-ac68-417b-aefb-4e8b09a83401/volumes" Jan 04 13:23:52 crc kubenswrapper[5003]: I0104 13:23:52.818801 5003 scope.go:117] "RemoveContainer" containerID="52daaebfd1aeb24d03be8c481077b1f70dbe604c8f0e2843ea742a7756c05d81" Jan 04 13:23:57 crc kubenswrapper[5003]: I0104 13:23:57.807834 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:23:57 crc kubenswrapper[5003]: E0104 13:23:57.808684 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:24:12 crc kubenswrapper[5003]: I0104 13:24:12.807902 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:24:12 crc kubenswrapper[5003]: E0104 13:24:12.809076 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:24:14 crc kubenswrapper[5003]: I0104 13:24:14.493346 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6d997bd4b5-nmr2n_301f653f-93ac-4940-9505-32414c777153/init/0.log" Jan 04 13:24:14 crc kubenswrapper[5003]: I0104 13:24:14.706154 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6d997bd4b5-nmr2n_301f653f-93ac-4940-9505-32414c777153/init/0.log" Jan 04 13:24:14 crc kubenswrapper[5003]: I0104 13:24:14.764514 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6d997bd4b5-nmr2n_301f653f-93ac-4940-9505-32414c777153/dnsmasq-dns/0.log" Jan 04 13:24:14 crc kubenswrapper[5003]: I0104 13:24:14.906801 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-66dd6c68-jl7x7_ab1bcdeb-2e22-414d-9214-858e867a6617/keystone-api/0.log" Jan 04 13:24:14 crc kubenswrapper[5003]: I0104 13:24:14.998278 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-80e0-account-create-update-xqp87_26b75c4c-5b23-426d-9880-d2d9a8daebd3/mariadb-account-create-update/0.log" Jan 04 13:24:15 crc kubenswrapper[5003]: I0104 13:24:15.153656 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_keystone-bootstrap-n76kt_940c2752-d359-4e0f-bcca-cfb3689750d4/keystone-bootstrap/0.log" Jan 04 13:24:15 crc kubenswrapper[5003]: I0104 13:24:15.211025 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-db-create-pt9b2_9610d2b5-f6c7-4144-a02c-94b8b418f540/mariadb-database-create/0.log" Jan 04 13:24:15 crc kubenswrapper[5003]: I0104 13:24:15.385715 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-db-sync-622cf_05dffc83-fce8-4e1c-8a0a-9120efe4b704/keystone-db-sync/0.log" Jan 04 13:24:15 crc kubenswrapper[5003]: I0104 13:24:15.549766 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-copy-data_c3e0f94e-4084-44d2-84ea-9da47439225d/adoption/0.log" Jan 04 13:24:15 crc kubenswrapper[5003]: I0104 13:24:15.826671 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_00679dc8-7acc-4aaf-afe9-105458b3fd33/mysql-bootstrap/0.log" Jan 04 13:24:16 crc kubenswrapper[5003]: I0104 13:24:16.087593 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_00679dc8-7acc-4aaf-afe9-105458b3fd33/mysql-bootstrap/0.log" Jan 04 13:24:16 crc kubenswrapper[5003]: I0104 13:24:16.116806 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_00679dc8-7acc-4aaf-afe9-105458b3fd33/galera/0.log" Jan 04 13:24:16 crc kubenswrapper[5003]: I0104 13:24:16.333825 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c48613d9-a038-4293-855b-6d05642cc386/mysql-bootstrap/0.log" Jan 04 13:24:16 crc kubenswrapper[5003]: I0104 13:24:16.728454 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_729222f1-c416-4981-9312-7c642a0fe9f1/memcached/0.log" Jan 04 13:24:16 crc kubenswrapper[5003]: I0104 13:24:16.812082 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c48613d9-a038-4293-855b-6d05642cc386/galera/0.log" Jan 04 13:24:16 crc kubenswrapper[5003]: I0104 13:24:16.834685 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c48613d9-a038-4293-855b-6d05642cc386/mysql-bootstrap/0.log" Jan 04 13:24:16 crc kubenswrapper[5003]: I0104 13:24:16.970826 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_bd1398e5-5eb1-48b1-b460-343c25250504/openstackclient/0.log" Jan 04 13:24:17 crc kubenswrapper[5003]: I0104 13:24:17.069724 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-copy-data_b82c43c6-27e4-4e10-a312-096301b0e50f/adoption/0.log" Jan 04 13:24:17 crc kubenswrapper[5003]: I0104 13:24:17.179587 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_326adc48-46fc-4f3a-bbba-99e081d4f6ff/openstack-network-exporter/0.log" Jan 04 13:24:17 crc kubenswrapper[5003]: I0104 13:24:17.306667 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_326adc48-46fc-4f3a-bbba-99e081d4f6ff/ovn-northd/0.log" Jan 04 13:24:17 crc kubenswrapper[5003]: I0104 13:24:17.374327 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_0ff4f026-1e2a-44a7-848a-ccd5566ece95/openstack-network-exporter/0.log" Jan 04 13:24:17 crc kubenswrapper[5003]: I0104 13:24:17.418293 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_0ff4f026-1e2a-44a7-848a-ccd5566ece95/ovsdbserver-nb/0.log" Jan 04 13:24:17 crc kubenswrapper[5003]: I0104 13:24:17.590734 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_c62c7c4b-4dd2-40c8-b209-e29c1dfa255f/openstack-network-exporter/0.log" Jan 04 13:24:17 crc kubenswrapper[5003]: I0104 13:24:17.594123 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_c62c7c4b-4dd2-40c8-b209-e29c1dfa255f/ovsdbserver-nb/0.log" Jan 04 13:24:17 crc kubenswrapper[5003]: I0104 13:24:17.853265 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_486fc164-58ae-47f8-bcd1-2c98e7c12b8f/openstack-network-exporter/0.log" Jan 04 13:24:17 crc kubenswrapper[5003]: I0104 13:24:17.897726 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_486fc164-58ae-47f8-bcd1-2c98e7c12b8f/ovsdbserver-nb/0.log" Jan 04 13:24:18 crc kubenswrapper[5003]: I0104 13:24:18.002394 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_bfffcf93-0d03-45b0-ad26-ed2c799360f0/openstack-network-exporter/0.log" Jan 04 13:24:18 crc kubenswrapper[5003]: I0104 13:24:18.084328 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_bfffcf93-0d03-45b0-ad26-ed2c799360f0/ovsdbserver-sb/0.log" Jan 04 13:24:18 crc kubenswrapper[5003]: I0104 13:24:18.208441 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_9f3c6348-1679-407c-92e7-b8c0afa0591b/openstack-network-exporter/0.log" Jan 04 13:24:18 crc kubenswrapper[5003]: I0104 13:24:18.215253 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_9f3c6348-1679-407c-92e7-b8c0afa0591b/ovsdbserver-sb/0.log" Jan 04 13:24:18 crc kubenswrapper[5003]: I0104 13:24:18.426765 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_ad42204b-8d7a-415b-a30c-2e8e4fb242d8/ovsdbserver-sb/0.log" Jan 04 13:24:18 crc kubenswrapper[5003]: I0104 13:24:18.436691 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_ad42204b-8d7a-415b-a30c-2e8e4fb242d8/openstack-network-exporter/0.log" Jan 04 13:24:18 crc kubenswrapper[5003]: I0104 13:24:18.520248 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_006934d0-3792-4855-ae29-d1d336d53937/setup-container/0.log" Jan 04 13:24:18 crc kubenswrapper[5003]: I0104 13:24:18.721462 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_006934d0-3792-4855-ae29-d1d336d53937/setup-container/0.log" Jan 04 13:24:18 crc kubenswrapper[5003]: I0104 13:24:18.769422 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_006934d0-3792-4855-ae29-d1d336d53937/rabbitmq/0.log" Jan 04 13:24:18 crc kubenswrapper[5003]: I0104 13:24:18.790563 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_8934ee0c-1985-4bc1-95cb-50fcbbf7909d/setup-container/0.log" Jan 04 13:24:19 crc kubenswrapper[5003]: I0104 13:24:19.007397 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_8934ee0c-1985-4bc1-95cb-50fcbbf7909d/rabbitmq/0.log" Jan 04 13:24:19 crc kubenswrapper[5003]: I0104 13:24:19.048312 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_rabbitmq-server-0_8934ee0c-1985-4bc1-95cb-50fcbbf7909d/setup-container/0.log" Jan 04 13:24:22 crc kubenswrapper[5003]: I0104 13:24:22.835655 5003 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","poda90177e2-ac68-417b-aefb-4e8b09a83401"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort poda90177e2-ac68-417b-aefb-4e8b09a83401] : Timed out while waiting for systemd to remove kubepods-besteffort-poda90177e2_ac68_417b_aefb_4e8b09a83401.slice" Jan 04 13:24:22 crc kubenswrapper[5003]: E0104 13:24:22.836509 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort poda90177e2-ac68-417b-aefb-4e8b09a83401] : unable to destroy cgroup paths for cgroup [kubepods besteffort poda90177e2-ac68-417b-aefb-4e8b09a83401] : Timed out while waiting for systemd to remove kubepods-besteffort-poda90177e2_ac68_417b_aefb_4e8b09a83401.slice" pod="openshift-must-gather-w9lnh/crc-debug-44s4w" podUID="a90177e2-ac68-417b-aefb-4e8b09a83401" Jan 04 13:24:23 crc kubenswrapper[5003]: I0104 13:24:23.159831 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w9lnh/crc-debug-44s4w" Jan 04 13:24:24 crc kubenswrapper[5003]: I0104 13:24:24.813906 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:24:24 crc kubenswrapper[5003]: E0104 13:24:24.814303 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:24:37 crc kubenswrapper[5003]: I0104 13:24:37.807170 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:24:37 crc kubenswrapper[5003]: E0104 13:24:37.808184 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:24:39 crc kubenswrapper[5003]: I0104 13:24:39.421168 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-f6f74d6db-89lgv_c8ead0af-3ab9-4f13-b94f-83b8b48c60b7/manager/0.log" Jan 04 13:24:39 crc kubenswrapper[5003]: I0104 13:24:39.625325 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-78979fc445-lng2r_916a829e-a1f9-4c4c-9253-b5d1b901f2f3/manager/0.log" Jan 04 13:24:39 crc kubenswrapper[5003]: I0104 13:24:39.692525 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx_4eb99728-449d-4016-86e5-47e74e5f97e9/util/0.log" Jan 04 13:24:39 crc kubenswrapper[5003]: I0104 13:24:39.909321 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx_4eb99728-449d-4016-86e5-47e74e5f97e9/pull/0.log" Jan 04 13:24:39 crc kubenswrapper[5003]: I0104 13:24:39.926529 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx_4eb99728-449d-4016-86e5-47e74e5f97e9/pull/0.log" Jan 04 13:24:39 crc kubenswrapper[5003]: I0104 13:24:39.959499 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx_4eb99728-449d-4016-86e5-47e74e5f97e9/util/0.log" Jan 04 13:24:40 crc kubenswrapper[5003]: I0104 13:24:40.187249 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx_4eb99728-449d-4016-86e5-47e74e5f97e9/util/0.log" Jan 04 13:24:40 crc kubenswrapper[5003]: I0104 13:24:40.209555 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx_4eb99728-449d-4016-86e5-47e74e5f97e9/extract/0.log" Jan 04 13:24:40 crc kubenswrapper[5003]: I0104 13:24:40.210050 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_de70fa16a7ca5622188f18febf39673d50b3bc4dd3ef258c154a3707dd7bzpx_4eb99728-449d-4016-86e5-47e74e5f97e9/pull/0.log" Jan 04 13:24:40 crc kubenswrapper[5003]: I0104 13:24:40.419401 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-66f8b87655-5r27f_8c9bc809-322b-4079-a1ea-533ce9239181/manager/0.log" Jan 04 13:24:40 crc kubenswrapper[5003]: I0104 13:24:40.509443 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-7b549fc966-427tw_218ad7b5-e7ca-4f1a-b863-1f160424b195/manager/0.log" Jan 04 13:24:40 crc kubenswrapper[5003]: I0104 13:24:40.639955 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-658dd65b86-fd9hs_549e2841-20b6-4018-85f2-0bc091560658/manager/0.log" Jan 04 13:24:40 crc kubenswrapper[5003]: I0104 13:24:40.777192 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-7f5ddd8d7b-nbbqt_957cb0f0-d8f1-43a9-8d05-0b7db926c066/manager/0.log" Jan 04 13:24:41 crc kubenswrapper[5003]: I0104 13:24:41.038826 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-f99f54bc8-j5hvw_04d05f95-12aa-4a8b-9f4c-721247547b88/manager/0.log" Jan 04 13:24:41 crc kubenswrapper[5003]: I0104 13:24:41.194933 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-6d99759cf-mwxxf_c04604e1-db01-4451-8a27-e439e8f5a94c/manager/0.log" Jan 04 13:24:41 crc kubenswrapper[5003]: I0104 13:24:41.322484 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-568985c78-94d29_157189ac-3baf-49a7-b37b-dacddf4f43af/manager/0.log" Jan 04 13:24:41 crc kubenswrapper[5003]: I0104 13:24:41.333656 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-598945d5b8-869w7_c317719a-c16c-4221-9fa4-029bc0d7a004/manager/0.log" Jan 04 13:24:41 crc kubenswrapper[5003]: I0104 13:24:41.610683 5003 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-7cd87b778f-n2549_ef8d1d25-5343-4471-80aa-df8739a0f5d6/manager/0.log" Jan 04 13:24:41 crc kubenswrapper[5003]: I0104 13:24:41.833843 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-7b88bfc995-c45pv_b1d992ec-f4ef-4925-a225-6407ca5cea0a/manager/0.log" Jan 04 13:24:42 crc kubenswrapper[5003]: I0104 13:24:42.086070 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5fbbf8b6cc-h7zz8_bc716961-01bb-4e23-a58e-f44f81d91bee/manager/0.log" Jan 04 13:24:42 crc kubenswrapper[5003]: I0104 13:24:42.090883 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-68c649d9d-2kxcj_25453bdc-8892-48d4-aca5-cb9549e9e59d/manager/0.log" Jan 04 13:24:42 crc kubenswrapper[5003]: I0104 13:24:42.292625 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-5c4776bcc5qnqc9_682c6705-26e9-4d83-aaa8-48fa906104dc/manager/0.log" Jan 04 13:24:42 crc kubenswrapper[5003]: I0104 13:24:42.812933 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6879547b79-t72rf_24d858a4-77b9-4c14-a43e-535f93982288/operator/0.log" Jan 04 13:24:43 crc kubenswrapper[5003]: I0104 13:24:43.000235 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-fswhr_1ed8e853-ba61-448c-a7bb-3cfeb66a6c81/registry-server/0.log" Jan 04 13:24:43 crc kubenswrapper[5003]: I0104 13:24:43.146261 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-bf6d4f946-m2pkz_b7dc1fbb-7dff-4a95-90bf-d7f4dd97698f/manager/0.log" Jan 04 13:24:43 crc kubenswrapper[5003]: I0104 13:24:43.239467 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-9b6f8f78c-pfsq5_033a9e41-3e14-4a99-9e2c-9ad9151b8cea/manager/0.log" Jan 04 13:24:43 crc kubenswrapper[5003]: I0104 13:24:43.285966 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7df7568dd6-flxhv_a25a33a1-bb9c-4cd8-9a41-f3c6f1ef5c14/manager/0.log" Jan 04 13:24:43 crc kubenswrapper[5003]: I0104 13:24:43.500135 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-6jlbf_e798bc76-3f22-4bf8-b337-bf3bf03ca3b2/operator/0.log" Jan 04 13:24:43 crc kubenswrapper[5003]: I0104 13:24:43.511322 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bb586bbf4-jb94x_a1e2f0e1-3b63-4ec0-a0e0-9e84f3dc891a/manager/0.log" Jan 04 13:24:43 crc kubenswrapper[5003]: I0104 13:24:43.742454 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-68d988df55-vkj55_e9ebf167-b161-4984-9c2b-caeee988e697/manager/0.log" Jan 04 13:24:43 crc kubenswrapper[5003]: I0104 13:24:43.789718 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-6c866cfdcb-d4nbt_6d6813e0-7085-4a6e-af8a-7acb60007841/manager/0.log" Jan 04 13:24:43 crc kubenswrapper[5003]: I0104 13:24:43.827494 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-9dbdf6486-4trlm_03fd52b2-dba9-4298-b44b-3ef8c22a4237/manager/0.log" Jan 04 13:24:48 crc kubenswrapper[5003]: I0104 13:24:48.807005 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:24:48 crc kubenswrapper[5003]: E0104 13:24:48.807701 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:25:01 crc kubenswrapper[5003]: I0104 13:25:01.806624 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:25:01 crc kubenswrapper[5003]: E0104 13:25:01.808006 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:25:07 crc kubenswrapper[5003]: I0104 13:25:07.196742 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-zgmzz_c99d5981-7c0d-49d6-8b8e-8cb2c8c1027a/control-plane-machine-set-operator/0.log" Jan 04 13:25:07 crc kubenswrapper[5003]: I0104 13:25:07.404622 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dslqq_ef5fdbe3-490c-476b-af83-a810d6e5a888/kube-rbac-proxy/0.log" Jan 04 13:25:07 crc kubenswrapper[5003]: I0104 13:25:07.476133 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-dslqq_ef5fdbe3-490c-476b-af83-a810d6e5a888/machine-api-operator/0.log" Jan 04 13:25:15 crc kubenswrapper[5003]: I0104 13:25:15.391585 5003 scope.go:117] "RemoveContainer" containerID="3f05a8daaa10ee961963f9d44213353d814d2fa7fa196b2c1992e20dfb045a73" Jan 04 13:25:15 crc kubenswrapper[5003]: I0104 13:25:15.427360 5003 scope.go:117] "RemoveContainer" containerID="8c1d0154f9dfed67d7187c2c639352e2b94fe3f323e38c9e9c9e6fde56e79cec" Jan 04 13:25:15 crc kubenswrapper[5003]: I0104 13:25:15.454182 5003 scope.go:117] "RemoveContainer" containerID="b6b45c7d3eabea0e3e54c917ab563b6a20e448a2adefddc404e3f39257014458" Jan 04 13:25:15 crc kubenswrapper[5003]: I0104 13:25:15.501576 5003 scope.go:117] "RemoveContainer" containerID="a2a9887b514c070ce3ed6faff8b685c695e3ad1bd22e8c70468e23cbea93f9b5" Jan 04 13:25:16 crc kubenswrapper[5003]: I0104 13:25:16.806594 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:25:16 crc kubenswrapper[5003]: E0104 13:25:16.807044 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:25:24 crc kubenswrapper[5003]: I0104 13:25:24.251924 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-6h4sb_c58a6400-ce47-4bb9-873d-12da105e4794/cert-manager-controller/0.log" Jan 04 13:25:24 crc kubenswrapper[5003]: I0104 13:25:24.433351 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-9njcn_e3aa0270-77e2-4f4b-bbf7-8957259dc9d0/cert-manager-cainjector/0.log" Jan 04 13:25:24 crc kubenswrapper[5003]: I0104 13:25:24.538560 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-hvhx7_27b975c9-a239-4818-b46c-f22360f31341/cert-manager-webhook/0.log" Jan 04 13:25:29 crc kubenswrapper[5003]: I0104 13:25:29.807850 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:25:29 crc kubenswrapper[5003]: E0104 13:25:29.810420 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:25:39 crc kubenswrapper[5003]: I0104 13:25:39.237773 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6ff7998486-zxckq_86712d7d-38bd-4a51-842d-c168bf155f04/nmstate-console-plugin/0.log" Jan 04 13:25:39 crc kubenswrapper[5003]: I0104 13:25:39.398562 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-v2x7n_10341446-4c79-4087-8c82-80ffcb35f39a/nmstate-handler/0.log" Jan 04 13:25:39 crc kubenswrapper[5003]: I0104 13:25:39.447614 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-fldh6_1cbd3f4f-d288-4588-a0c4-1e616c0e510a/kube-rbac-proxy/0.log" Jan 04 13:25:39 crc kubenswrapper[5003]: I0104 13:25:39.752758 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f7f7578db-fldh6_1cbd3f4f-d288-4588-a0c4-1e616c0e510a/nmstate-metrics/0.log" Jan 04 13:25:39 crc kubenswrapper[5003]: I0104 13:25:39.956860 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-f8fb84555-tc9qf_1e4f1d48-3190-4f87-8052-8c722ca87582/nmstate-webhook/0.log" Jan 04 13:25:39 crc kubenswrapper[5003]: I0104 13:25:39.958589 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-6769fb99d-nngv8_034caf29-3d2b-442f-9524-bbb547b0b8bc/nmstate-operator/0.log" Jan 04 13:25:42 crc kubenswrapper[5003]: I0104 13:25:42.807152 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:25:42 crc kubenswrapper[5003]: E0104 13:25:42.807824 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:25:54 crc kubenswrapper[5003]: I0104 13:25:54.813624 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:25:54 crc kubenswrapper[5003]: E0104 13:25:54.814918 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:25:57 crc kubenswrapper[5003]: I0104 13:25:57.296453 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-72lpp_428d7c60-fd2b-430e-9a21-e6d1ec480d00/kube-rbac-proxy/0.log" Jan 04 13:25:57 crc kubenswrapper[5003]: I0104 13:25:57.600145 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-frr-files/0.log" Jan 04 13:25:57 crc kubenswrapper[5003]: I0104 13:25:57.666770 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5bddd4b946-72lpp_428d7c60-fd2b-430e-9a21-e6d1ec480d00/controller/0.log" Jan 04 13:25:57 crc kubenswrapper[5003]: I0104 13:25:57.864306 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-frr-files/0.log" Jan 04 13:25:57 crc kubenswrapper[5003]: I0104 13:25:57.886879 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-metrics/0.log" Jan 04 13:25:57 crc kubenswrapper[5003]: I0104 13:25:57.941161 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-reloader/0.log" Jan 04 13:25:57 crc kubenswrapper[5003]: I0104 13:25:57.953560 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-reloader/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.128080 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-reloader/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.164834 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-frr-files/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.168049 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-metrics/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.214126 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-metrics/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.386209 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-frr-files/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.390830 5003 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-reloader/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.406272 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/cp-metrics/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.434464 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/controller/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.595327 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/frr-metrics/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.612086 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/kube-rbac-proxy/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.701269 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/kube-rbac-proxy-frr/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.822770 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/reloader/0.log" Jan 04 13:25:58 crc kubenswrapper[5003]: I0104 13:25:58.982386 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7784b6fcf-k5rzq_2878f402-a6b2-4abd-9362-1a33ba2c7cfa/frr-k8s-webhook-server/0.log" Jan 04 13:25:59 crc kubenswrapper[5003]: I0104 13:25:59.239515 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7887cff594-wfltc_e0b25671-0ecf-4f1e-a6ba-700b4fe3fb93/manager/0.log" Jan 04 13:25:59 crc kubenswrapper[5003]: I0104 13:25:59.326212 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5c4fdcc4bf-rtzgn_8515cdcc-9add-47db-b73e-f72e547a1727/webhook-server/0.log" Jan 04 13:25:59 crc kubenswrapper[5003]: I0104 13:25:59.585514 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-gdkc4_111f1bb7-53d6-4079-a637-f586e03ba8c5/kube-rbac-proxy/0.log" Jan 04 13:26:00 crc kubenswrapper[5003]: I0104 13:26:00.143242 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-gdkc4_111f1bb7-53d6-4079-a637-f586e03ba8c5/speaker/0.log" Jan 04 13:26:00 crc kubenswrapper[5003]: I0104 13:26:00.540906 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-sm4fk_09bc370d-2ed0-4d5b-a050-22d3b22218ae/frr/0.log" Jan 04 13:26:07 crc kubenswrapper[5003]: I0104 13:26:07.807619 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:26:07 crc kubenswrapper[5003]: E0104 13:26:07.808929 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:26:15 crc kubenswrapper[5003]: I0104 13:26:15.651782 5003 scope.go:117] "RemoveContainer" 
containerID="62ccd8fa89d3b453df82e0bc1cdfe478a889d349f3b21b45518caa1b7bb53e80" Jan 04 13:26:16 crc kubenswrapper[5003]: I0104 13:26:16.134372 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8_c70c4c5a-820d-4671-a4a8-dae25ff5f3f3/util/0.log" Jan 04 13:26:16 crc kubenswrapper[5003]: I0104 13:26:16.304762 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8_c70c4c5a-820d-4671-a4a8-dae25ff5f3f3/util/0.log" Jan 04 13:26:16 crc kubenswrapper[5003]: I0104 13:26:16.377479 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8_c70c4c5a-820d-4671-a4a8-dae25ff5f3f3/pull/0.log" Jan 04 13:26:16 crc kubenswrapper[5003]: I0104 13:26:16.408516 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8_c70c4c5a-820d-4671-a4a8-dae25ff5f3f3/pull/0.log" Jan 04 13:26:16 crc kubenswrapper[5003]: I0104 13:26:16.578966 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8_c70c4c5a-820d-4671-a4a8-dae25ff5f3f3/util/0.log" Jan 04 13:26:16 crc kubenswrapper[5003]: I0104 13:26:16.583324 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8_c70c4c5a-820d-4671-a4a8-dae25ff5f3f3/extract/0.log" Jan 04 13:26:16 crc kubenswrapper[5003]: I0104 13:26:16.592522 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a95wl8_c70c4c5a-820d-4671-a4a8-dae25ff5f3f3/pull/0.log" Jan 04 13:26:16 crc kubenswrapper[5003]: I0104 13:26:16.769288 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s_afaa573c-f32d-423d-a608-1d5443d9b498/util/0.log" Jan 04 13:26:16 crc kubenswrapper[5003]: I0104 13:26:16.982048 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s_afaa573c-f32d-423d-a608-1d5443d9b498/util/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.017271 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s_afaa573c-f32d-423d-a608-1d5443d9b498/pull/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.032633 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s_afaa573c-f32d-423d-a608-1d5443d9b498/pull/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.207097 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s_afaa573c-f32d-423d-a608-1d5443d9b498/pull/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.223731 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s_afaa573c-f32d-423d-a608-1d5443d9b498/util/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.271448 
5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5b7fccbebf0e22d2dd769066fa7aaa90fd620c5db34f2af6c91e4319d4msb7s_afaa573c-f32d-423d-a608-1d5443d9b498/extract/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.410979 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj_d72d2173-7ced-4d5d-bd95-ee65cbf3ee66/util/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.641523 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj_d72d2173-7ced-4d5d-bd95-ee65cbf3ee66/pull/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.654282 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj_d72d2173-7ced-4d5d-bd95-ee65cbf3ee66/util/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.664599 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj_d72d2173-7ced-4d5d-bd95-ee65cbf3ee66/pull/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.863680 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj_d72d2173-7ced-4d5d-bd95-ee65cbf3ee66/pull/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.879086 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj_d72d2173-7ced-4d5d-bd95-ee65cbf3ee66/util/0.log" Jan 04 13:26:17 crc kubenswrapper[5003]: I0104 13:26:17.886896 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98085b0df3808ebec39f9f9529f737144fe2dbcdaa4f334014817c0fa85tptj_d72d2173-7ced-4d5d-bd95-ee65cbf3ee66/extract/0.log" Jan 04 13:26:18 crc kubenswrapper[5003]: I0104 13:26:18.059694 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l2psm_a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f/extract-utilities/0.log" Jan 04 13:26:18 crc kubenswrapper[5003]: I0104 13:26:18.294939 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l2psm_a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f/extract-utilities/0.log" Jan 04 13:26:18 crc kubenswrapper[5003]: I0104 13:26:18.308700 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l2psm_a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f/extract-content/0.log" Jan 04 13:26:18 crc kubenswrapper[5003]: I0104 13:26:18.317390 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l2psm_a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f/extract-content/0.log" Jan 04 13:26:18 crc kubenswrapper[5003]: I0104 13:26:18.507129 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l2psm_a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f/extract-content/0.log" Jan 04 13:26:18 crc kubenswrapper[5003]: I0104 13:26:18.534757 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l2psm_a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f/extract-utilities/0.log" Jan 04 13:26:18 crc kubenswrapper[5003]: I0104 13:26:18.730129 5003 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_community-operators-4v6d2_b778690e-8cf2-43d0-ac8b-f992c11318a8/extract-utilities/0.log" Jan 04 13:26:19 crc kubenswrapper[5003]: I0104 13:26:19.056253 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4v6d2_b778690e-8cf2-43d0-ac8b-f992c11318a8/extract-utilities/0.log" Jan 04 13:26:19 crc kubenswrapper[5003]: I0104 13:26:19.130461 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4v6d2_b778690e-8cf2-43d0-ac8b-f992c11318a8/extract-content/0.log" Jan 04 13:26:19 crc kubenswrapper[5003]: I0104 13:26:19.141064 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4v6d2_b778690e-8cf2-43d0-ac8b-f992c11318a8/extract-content/0.log" Jan 04 13:26:19 crc kubenswrapper[5003]: I0104 13:26:19.387878 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l2psm_a82e6f0c-30d6-4379-926b-4ee8cbd7ff1f/registry-server/0.log" Jan 04 13:26:19 crc kubenswrapper[5003]: I0104 13:26:19.430429 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4v6d2_b778690e-8cf2-43d0-ac8b-f992c11318a8/extract-utilities/0.log" Jan 04 13:26:19 crc kubenswrapper[5003]: I0104 13:26:19.440339 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-4v6d2_b778690e-8cf2-43d0-ac8b-f992c11318a8/extract-content/0.log" Jan 04 13:26:19 crc kubenswrapper[5003]: I0104 13:26:19.731450 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7z829_bb982b9b-26c8-4b54-b519-5d1c78c38ada/marketplace-operator/0.log" Jan 04 13:26:19 crc kubenswrapper[5003]: I0104 13:26:19.799117 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nbxfr_4dd9a653-4387-44cf-82a3-e6a7ec6713bf/extract-utilities/0.log" Jan 04 13:26:19 crc kubenswrapper[5003]: I0104 13:26:19.991907 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nbxfr_4dd9a653-4387-44cf-82a3-e6a7ec6713bf/extract-utilities/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.091897 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nbxfr_4dd9a653-4387-44cf-82a3-e6a7ec6713bf/extract-content/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.092182 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nbxfr_4dd9a653-4387-44cf-82a3-e6a7ec6713bf/extract-content/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.245946 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nbxfr_4dd9a653-4387-44cf-82a3-e6a7ec6713bf/extract-content/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.273626 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nbxfr_4dd9a653-4387-44cf-82a3-e6a7ec6713bf/extract-utilities/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.498362 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4gnrl_96de3db8-350f-49d3-96ab-2fc8b9535665/extract-utilities/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.504601 5003 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_community-operators-4v6d2_b778690e-8cf2-43d0-ac8b-f992c11318a8/registry-server/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.646060 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nbxfr_4dd9a653-4387-44cf-82a3-e6a7ec6713bf/registry-server/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.737666 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4gnrl_96de3db8-350f-49d3-96ab-2fc8b9535665/extract-utilities/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.758888 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4gnrl_96de3db8-350f-49d3-96ab-2fc8b9535665/extract-content/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.776702 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4gnrl_96de3db8-350f-49d3-96ab-2fc8b9535665/extract-content/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.926788 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4gnrl_96de3db8-350f-49d3-96ab-2fc8b9535665/extract-utilities/0.log" Jan 04 13:26:20 crc kubenswrapper[5003]: I0104 13:26:20.931610 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4gnrl_96de3db8-350f-49d3-96ab-2fc8b9535665/extract-content/0.log" Jan 04 13:26:21 crc kubenswrapper[5003]: I0104 13:26:21.837135 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-4gnrl_96de3db8-350f-49d3-96ab-2fc8b9535665/registry-server/0.log" Jan 04 13:26:22 crc kubenswrapper[5003]: I0104 13:26:22.807167 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:26:22 crc kubenswrapper[5003]: E0104 13:26:22.807513 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:26:33 crc kubenswrapper[5003]: I0104 13:26:33.807478 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:26:33 crc kubenswrapper[5003]: E0104 13:26:33.808597 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:26:46 crc kubenswrapper[5003]: I0104 13:26:46.807580 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:26:46 crc kubenswrapper[5003]: E0104 13:26:46.808923 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:26:58 crc kubenswrapper[5003]: I0104 13:26:58.807170 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:26:58 crc kubenswrapper[5003]: E0104 13:26:58.808163 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.637903 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7mfrb"] Jan 04 13:26:59 crc kubenswrapper[5003]: E0104 13:26:59.639258 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a90177e2-ac68-417b-aefb-4e8b09a83401" containerName="container-00" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.639388 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="a90177e2-ac68-417b-aefb-4e8b09a83401" containerName="container-00" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.639714 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="a90177e2-ac68-417b-aefb-4e8b09a83401" containerName="container-00" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.641367 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.662988 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7mfrb"] Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.750402 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-utilities\") pod \"community-operators-7mfrb\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.750738 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m89j\" (UniqueName: \"kubernetes.io/projected/8e1ce37f-499b-47a7-9456-133443e7d735-kube-api-access-7m89j\") pod \"community-operators-7mfrb\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.751312 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-catalog-content\") pod \"community-operators-7mfrb\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.853321 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-catalog-content\") pod \"community-operators-7mfrb\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.853446 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-utilities\") pod \"community-operators-7mfrb\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.853473 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m89j\" (UniqueName: \"kubernetes.io/projected/8e1ce37f-499b-47a7-9456-133443e7d735-kube-api-access-7m89j\") pod \"community-operators-7mfrb\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.854527 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-catalog-content\") pod \"community-operators-7mfrb\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.854827 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-utilities\") pod \"community-operators-7mfrb\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.884840 5003 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7m89j\" (UniqueName: \"kubernetes.io/projected/8e1ce37f-499b-47a7-9456-133443e7d735-kube-api-access-7m89j\") pod \"community-operators-7mfrb\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:26:59 crc kubenswrapper[5003]: I0104 13:26:59.968535 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:27:00 crc kubenswrapper[5003]: I0104 13:27:00.677183 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7mfrb"] Jan 04 13:27:01 crc kubenswrapper[5003]: I0104 13:27:01.668228 5003 generic.go:334] "Generic (PLEG): container finished" podID="8e1ce37f-499b-47a7-9456-133443e7d735" containerID="1ffb45070d8125bad895107adb4a7484b20f88be877545fcfc557038c10d0cd5" exitCode=0 Jan 04 13:27:01 crc kubenswrapper[5003]: I0104 13:27:01.668326 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mfrb" event={"ID":"8e1ce37f-499b-47a7-9456-133443e7d735","Type":"ContainerDied","Data":"1ffb45070d8125bad895107adb4a7484b20f88be877545fcfc557038c10d0cd5"} Jan 04 13:27:01 crc kubenswrapper[5003]: I0104 13:27:01.668991 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mfrb" event={"ID":"8e1ce37f-499b-47a7-9456-133443e7d735","Type":"ContainerStarted","Data":"42e10e4b97c84d198356d250325e2fdb8d8694cad569c8858351e7f4372e9c5b"} Jan 04 13:27:02 crc kubenswrapper[5003]: I0104 13:27:02.698162 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mfrb" event={"ID":"8e1ce37f-499b-47a7-9456-133443e7d735","Type":"ContainerStarted","Data":"a2c9caf3fc09a3579d1241a98492b9f27b538e5d18effddf55e6e6491663d6d9"} Jan 04 13:27:03 crc kubenswrapper[5003]: I0104 13:27:03.716494 5003 generic.go:334] "Generic (PLEG): container finished" podID="8e1ce37f-499b-47a7-9456-133443e7d735" containerID="a2c9caf3fc09a3579d1241a98492b9f27b538e5d18effddf55e6e6491663d6d9" exitCode=0 Jan 04 13:27:03 crc kubenswrapper[5003]: I0104 13:27:03.716571 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mfrb" event={"ID":"8e1ce37f-499b-47a7-9456-133443e7d735","Type":"ContainerDied","Data":"a2c9caf3fc09a3579d1241a98492b9f27b538e5d18effddf55e6e6491663d6d9"} Jan 04 13:27:04 crc kubenswrapper[5003]: I0104 13:27:04.734507 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mfrb" event={"ID":"8e1ce37f-499b-47a7-9456-133443e7d735","Type":"ContainerStarted","Data":"ab105aa1e94fa5be20daf87c46011eba5497bb8219cc45ec7cea8cd66ac71ac7"} Jan 04 13:27:04 crc kubenswrapper[5003]: I0104 13:27:04.773429 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7mfrb" podStartSLOduration=3.273829527 podStartE2EDuration="5.773408296s" podCreationTimestamp="2026-01-04 13:26:59 +0000 UTC" firstStartedPulling="2026-01-04 13:27:01.671277727 +0000 UTC m=+5937.144307598" lastFinishedPulling="2026-01-04 13:27:04.170856516 +0000 UTC m=+5939.643886367" observedRunningTime="2026-01-04 13:27:04.770476989 +0000 UTC m=+5940.243506870" watchObservedRunningTime="2026-01-04 13:27:04.773408296 +0000 UTC m=+5940.246438157" Jan 04 13:27:09 crc kubenswrapper[5003]: I0104 13:27:09.969327 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:27:09 crc kubenswrapper[5003]: I0104 13:27:09.970162 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:27:10 crc kubenswrapper[5003]: I0104 13:27:10.048690 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:27:10 crc kubenswrapper[5003]: I0104 13:27:10.895233 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:27:10 crc kubenswrapper[5003]: I0104 13:27:10.967547 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7mfrb"] Jan 04 13:27:12 crc kubenswrapper[5003]: I0104 13:27:12.810180 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:27:12 crc kubenswrapper[5003]: E0104 13:27:12.810806 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:27:12 crc kubenswrapper[5003]: I0104 13:27:12.835534 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7mfrb" podUID="8e1ce37f-499b-47a7-9456-133443e7d735" containerName="registry-server" containerID="cri-o://ab105aa1e94fa5be20daf87c46011eba5497bb8219cc45ec7cea8cd66ac71ac7" gracePeriod=2 Jan 04 13:27:13 crc kubenswrapper[5003]: I0104 13:27:13.851783 5003 generic.go:334] "Generic (PLEG): container finished" podID="8e1ce37f-499b-47a7-9456-133443e7d735" containerID="ab105aa1e94fa5be20daf87c46011eba5497bb8219cc45ec7cea8cd66ac71ac7" exitCode=0 Jan 04 13:27:13 crc kubenswrapper[5003]: I0104 13:27:13.851855 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mfrb" event={"ID":"8e1ce37f-499b-47a7-9456-133443e7d735","Type":"ContainerDied","Data":"ab105aa1e94fa5be20daf87c46011eba5497bb8219cc45ec7cea8cd66ac71ac7"} Jan 04 13:27:13 crc kubenswrapper[5003]: I0104 13:27:13.852307 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7mfrb" event={"ID":"8e1ce37f-499b-47a7-9456-133443e7d735","Type":"ContainerDied","Data":"42e10e4b97c84d198356d250325e2fdb8d8694cad569c8858351e7f4372e9c5b"} Jan 04 13:27:13 crc kubenswrapper[5003]: I0104 13:27:13.852340 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42e10e4b97c84d198356d250325e2fdb8d8694cad569c8858351e7f4372e9c5b" Jan 04 13:27:13 crc kubenswrapper[5003]: I0104 13:27:13.857807 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:27:13 crc kubenswrapper[5003]: I0104 13:27:13.930710 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-utilities\") pod \"8e1ce37f-499b-47a7-9456-133443e7d735\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " Jan 04 13:27:13 crc kubenswrapper[5003]: I0104 13:27:13.930761 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-catalog-content\") pod \"8e1ce37f-499b-47a7-9456-133443e7d735\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " Jan 04 13:27:13 crc kubenswrapper[5003]: I0104 13:27:13.931175 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7m89j\" (UniqueName: \"kubernetes.io/projected/8e1ce37f-499b-47a7-9456-133443e7d735-kube-api-access-7m89j\") pod \"8e1ce37f-499b-47a7-9456-133443e7d735\" (UID: \"8e1ce37f-499b-47a7-9456-133443e7d735\") " Jan 04 13:27:13 crc kubenswrapper[5003]: I0104 13:27:13.932595 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-utilities" (OuterVolumeSpecName: "utilities") pod "8e1ce37f-499b-47a7-9456-133443e7d735" (UID: "8e1ce37f-499b-47a7-9456-133443e7d735"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:27:13 crc kubenswrapper[5003]: I0104 13:27:13.938515 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e1ce37f-499b-47a7-9456-133443e7d735-kube-api-access-7m89j" (OuterVolumeSpecName: "kube-api-access-7m89j") pod "8e1ce37f-499b-47a7-9456-133443e7d735" (UID: "8e1ce37f-499b-47a7-9456-133443e7d735"). InnerVolumeSpecName "kube-api-access-7m89j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:27:14 crc kubenswrapper[5003]: I0104 13:27:14.005119 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8e1ce37f-499b-47a7-9456-133443e7d735" (UID: "8e1ce37f-499b-47a7-9456-133443e7d735"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:27:14 crc kubenswrapper[5003]: I0104 13:27:14.032228 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 13:27:14 crc kubenswrapper[5003]: I0104 13:27:14.032265 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e1ce37f-499b-47a7-9456-133443e7d735-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 13:27:14 crc kubenswrapper[5003]: I0104 13:27:14.032278 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7m89j\" (UniqueName: \"kubernetes.io/projected/8e1ce37f-499b-47a7-9456-133443e7d735-kube-api-access-7m89j\") on node \"crc\" DevicePath \"\"" Jan 04 13:27:14 crc kubenswrapper[5003]: I0104 13:27:14.864002 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7mfrb" Jan 04 13:27:14 crc kubenswrapper[5003]: I0104 13:27:14.910705 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7mfrb"] Jan 04 13:27:14 crc kubenswrapper[5003]: I0104 13:27:14.922191 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7mfrb"] Jan 04 13:27:16 crc kubenswrapper[5003]: I0104 13:27:16.824476 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e1ce37f-499b-47a7-9456-133443e7d735" path="/var/lib/kubelet/pods/8e1ce37f-499b-47a7-9456-133443e7d735/volumes" Jan 04 13:27:25 crc kubenswrapper[5003]: I0104 13:27:25.807464 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:27:25 crc kubenswrapper[5003]: E0104 13:27:25.808633 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:27:36 crc kubenswrapper[5003]: I0104 13:27:36.809498 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:27:36 crc kubenswrapper[5003]: E0104 13:27:36.811249 5003 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rcgwp_openshift-machine-config-operator(1d785a1a-7eaf-4192-915a-49f478c2a59a)\"" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" Jan 04 13:27:47 crc kubenswrapper[5003]: I0104 13:27:47.807512 5003 scope.go:117] "RemoveContainer" containerID="7aa67a3c5c4fdbbd2e40aec0730029c1a3d38ba544747c5e09a0aef3629ccda4" Jan 04 13:27:48 crc kubenswrapper[5003]: I0104 13:27:48.344086 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" event={"ID":"1d785a1a-7eaf-4192-915a-49f478c2a59a","Type":"ContainerStarted","Data":"79c8fe8a00ffde74563d5dac40a5c1ca43a66f4bdf28590af4ed7d6b739d6616"} Jan 04 13:27:54 crc kubenswrapper[5003]: I0104 13:27:54.432954 5003 generic.go:334] "Generic (PLEG): container finished" podID="b3f04202-e84a-44a9-a571-a9cd51a8a1bc" containerID="220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff" exitCode=0 Jan 04 13:27:54 crc kubenswrapper[5003]: I0104 13:27:54.433112 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-w9lnh/must-gather-w2x2z" event={"ID":"b3f04202-e84a-44a9-a571-a9cd51a8a1bc","Type":"ContainerDied","Data":"220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff"} Jan 04 13:27:54 crc kubenswrapper[5003]: I0104 13:27:54.435378 5003 scope.go:117] "RemoveContainer" containerID="220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff" Jan 04 13:27:54 crc kubenswrapper[5003]: I0104 13:27:54.556590 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-w9lnh_must-gather-w2x2z_b3f04202-e84a-44a9-a571-a9cd51a8a1bc/gather/0.log" Jan 04 13:28:01 crc kubenswrapper[5003]: 
I0104 13:28:01.832786 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-w9lnh/must-gather-w2x2z"] Jan 04 13:28:01 crc kubenswrapper[5003]: I0104 13:28:01.834630 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-w9lnh/must-gather-w2x2z" podUID="b3f04202-e84a-44a9-a571-a9cd51a8a1bc" containerName="copy" containerID="cri-o://310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee" gracePeriod=2 Jan 04 13:28:01 crc kubenswrapper[5003]: I0104 13:28:01.846353 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-w9lnh/must-gather-w2x2z"] Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.291409 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-w9lnh_must-gather-w2x2z_b3f04202-e84a-44a9-a571-a9cd51a8a1bc/copy/0.log" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.292495 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-w9lnh/must-gather-w2x2z" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.422604 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjm76\" (UniqueName: \"kubernetes.io/projected/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-kube-api-access-fjm76\") pod \"b3f04202-e84a-44a9-a571-a9cd51a8a1bc\" (UID: \"b3f04202-e84a-44a9-a571-a9cd51a8a1bc\") " Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.422890 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-must-gather-output\") pod \"b3f04202-e84a-44a9-a571-a9cd51a8a1bc\" (UID: \"b3f04202-e84a-44a9-a571-a9cd51a8a1bc\") " Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.435471 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-kube-api-access-fjm76" (OuterVolumeSpecName: "kube-api-access-fjm76") pod "b3f04202-e84a-44a9-a571-a9cd51a8a1bc" (UID: "b3f04202-e84a-44a9-a571-a9cd51a8a1bc"). InnerVolumeSpecName "kube-api-access-fjm76". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.525353 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjm76\" (UniqueName: \"kubernetes.io/projected/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-kube-api-access-fjm76\") on node \"crc\" DevicePath \"\"" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.539516 5003 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-w9lnh_must-gather-w2x2z_b3f04202-e84a-44a9-a571-a9cd51a8a1bc/copy/0.log" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.540391 5003 generic.go:334] "Generic (PLEG): container finished" podID="b3f04202-e84a-44a9-a571-a9cd51a8a1bc" containerID="310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee" exitCode=143 Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.540490 5003 scope.go:117] "RemoveContainer" containerID="310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.540502 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-w9lnh/must-gather-w2x2z" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.562795 5003 scope.go:117] "RemoveContainer" containerID="220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.586812 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "b3f04202-e84a-44a9-a571-a9cd51a8a1bc" (UID: "b3f04202-e84a-44a9-a571-a9cd51a8a1bc"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.628272 5003 scope.go:117] "RemoveContainer" containerID="310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.628404 5003 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/b3f04202-e84a-44a9-a571-a9cd51a8a1bc-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 04 13:28:02 crc kubenswrapper[5003]: E0104 13:28:02.630198 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee\": container with ID starting with 310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee not found: ID does not exist" containerID="310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.630261 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee"} err="failed to get container status \"310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee\": rpc error: code = NotFound desc = could not find container \"310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee\": container with ID starting with 310158d0c40217033843388ed6c08226669162ac3d5a941ec2398c57fed6e4ee not found: ID does not exist" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.630301 5003 scope.go:117] "RemoveContainer" containerID="220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff" Jan 04 13:28:02 crc kubenswrapper[5003]: E0104 13:28:02.631066 5003 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff\": container with ID starting with 220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff not found: ID does not exist" containerID="220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.631111 5003 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff"} err="failed to get container status \"220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff\": rpc error: code = NotFound desc = could not find container \"220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff\": container with ID starting with 220bba9d3f926193d66fe81356b562a3855a0760e6f353ceacd09e47e0a4aeff not found: ID does not exist" Jan 04 13:28:02 crc kubenswrapper[5003]: I0104 13:28:02.819630 5003 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="b3f04202-e84a-44a9-a571-a9cd51a8a1bc" path="/var/lib/kubelet/pods/b3f04202-e84a-44a9-a571-a9cd51a8a1bc/volumes" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.580421 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kjh8t"] Jan 04 13:29:02 crc kubenswrapper[5003]: E0104 13:29:02.584981 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e1ce37f-499b-47a7-9456-133443e7d735" containerName="extract-utilities" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.585258 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e1ce37f-499b-47a7-9456-133443e7d735" containerName="extract-utilities" Jan 04 13:29:02 crc kubenswrapper[5003]: E0104 13:29:02.585452 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e1ce37f-499b-47a7-9456-133443e7d735" containerName="extract-content" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.585650 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e1ce37f-499b-47a7-9456-133443e7d735" containerName="extract-content" Jan 04 13:29:02 crc kubenswrapper[5003]: E0104 13:29:02.585838 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3f04202-e84a-44a9-a571-a9cd51a8a1bc" containerName="gather" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.585994 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3f04202-e84a-44a9-a571-a9cd51a8a1bc" containerName="gather" Jan 04 13:29:02 crc kubenswrapper[5003]: E0104 13:29:02.586208 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3f04202-e84a-44a9-a571-a9cd51a8a1bc" containerName="copy" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.586368 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3f04202-e84a-44a9-a571-a9cd51a8a1bc" containerName="copy" Jan 04 13:29:02 crc kubenswrapper[5003]: E0104 13:29:02.586576 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e1ce37f-499b-47a7-9456-133443e7d735" containerName="registry-server" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.586737 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e1ce37f-499b-47a7-9456-133443e7d735" containerName="registry-server" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.587363 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3f04202-e84a-44a9-a571-a9cd51a8a1bc" containerName="gather" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.587558 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3f04202-e84a-44a9-a571-a9cd51a8a1bc" containerName="copy" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.587726 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e1ce37f-499b-47a7-9456-133443e7d735" containerName="registry-server" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.593708 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.612380 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kjh8t"] Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.684410 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zfrm\" (UniqueName: \"kubernetes.io/projected/28adb461-60bc-4f52-8592-a90935525a49-kube-api-access-5zfrm\") pod \"redhat-operators-kjh8t\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.684485 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-catalog-content\") pod \"redhat-operators-kjh8t\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.684566 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-utilities\") pod \"redhat-operators-kjh8t\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.786071 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zfrm\" (UniqueName: \"kubernetes.io/projected/28adb461-60bc-4f52-8592-a90935525a49-kube-api-access-5zfrm\") pod \"redhat-operators-kjh8t\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.786133 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-catalog-content\") pod \"redhat-operators-kjh8t\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.786208 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-utilities\") pod \"redhat-operators-kjh8t\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.786830 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-catalog-content\") pod \"redhat-operators-kjh8t\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.786845 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-utilities\") pod \"redhat-operators-kjh8t\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.809637 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5zfrm\" (UniqueName: \"kubernetes.io/projected/28adb461-60bc-4f52-8592-a90935525a49-kube-api-access-5zfrm\") pod \"redhat-operators-kjh8t\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:02 crc kubenswrapper[5003]: I0104 13:29:02.923459 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:03 crc kubenswrapper[5003]: I0104 13:29:03.425408 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kjh8t"] Jan 04 13:29:04 crc kubenswrapper[5003]: I0104 13:29:04.394747 5003 generic.go:334] "Generic (PLEG): container finished" podID="28adb461-60bc-4f52-8592-a90935525a49" containerID="dafeb0fb81dae760ec074d2462361e93af280f1bbb0e241983314da3d1724a7c" exitCode=0 Jan 04 13:29:04 crc kubenswrapper[5003]: I0104 13:29:04.394986 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kjh8t" event={"ID":"28adb461-60bc-4f52-8592-a90935525a49","Type":"ContainerDied","Data":"dafeb0fb81dae760ec074d2462361e93af280f1bbb0e241983314da3d1724a7c"} Jan 04 13:29:04 crc kubenswrapper[5003]: I0104 13:29:04.396852 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kjh8t" event={"ID":"28adb461-60bc-4f52-8592-a90935525a49","Type":"ContainerStarted","Data":"008084cc2f2c1a8328855f5deba62421e2bec7b9def2fd168233a01a18331a14"} Jan 04 13:29:04 crc kubenswrapper[5003]: I0104 13:29:04.398815 5003 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 04 13:29:05 crc kubenswrapper[5003]: I0104 13:29:05.410565 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kjh8t" event={"ID":"28adb461-60bc-4f52-8592-a90935525a49","Type":"ContainerStarted","Data":"fc78abe4c5c4af2eb882795fd3f0d2bfbdd84403f5b9826a26a7f5f1f5896c04"} Jan 04 13:29:06 crc kubenswrapper[5003]: I0104 13:29:06.427129 5003 generic.go:334] "Generic (PLEG): container finished" podID="28adb461-60bc-4f52-8592-a90935525a49" containerID="fc78abe4c5c4af2eb882795fd3f0d2bfbdd84403f5b9826a26a7f5f1f5896c04" exitCode=0 Jan 04 13:29:06 crc kubenswrapper[5003]: I0104 13:29:06.427262 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kjh8t" event={"ID":"28adb461-60bc-4f52-8592-a90935525a49","Type":"ContainerDied","Data":"fc78abe4c5c4af2eb882795fd3f0d2bfbdd84403f5b9826a26a7f5f1f5896c04"} Jan 04 13:29:07 crc kubenswrapper[5003]: I0104 13:29:07.445907 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kjh8t" event={"ID":"28adb461-60bc-4f52-8592-a90935525a49","Type":"ContainerStarted","Data":"e2338a45842ca12d54b08231908b465e03fd51ad0d953f8fa7f0fcc08d777561"} Jan 04 13:29:07 crc kubenswrapper[5003]: I0104 13:29:07.477746 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kjh8t" podStartSLOduration=2.980704324 podStartE2EDuration="5.477715218s" podCreationTimestamp="2026-01-04 13:29:02 +0000 UTC" firstStartedPulling="2026-01-04 13:29:04.398344907 +0000 UTC m=+6059.871374788" lastFinishedPulling="2026-01-04 13:29:06.895355831 +0000 UTC m=+6062.368385682" observedRunningTime="2026-01-04 13:29:07.475636363 +0000 UTC m=+6062.948666234" watchObservedRunningTime="2026-01-04 13:29:07.477715218 +0000 UTC m=+6062.950745099" Jan 04 13:29:12 crc 
kubenswrapper[5003]: I0104 13:29:12.924742 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:12 crc kubenswrapper[5003]: I0104 13:29:12.925667 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:13 crc kubenswrapper[5003]: I0104 13:29:13.994740 5003 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kjh8t" podUID="28adb461-60bc-4f52-8592-a90935525a49" containerName="registry-server" probeResult="failure" output=< Jan 04 13:29:13 crc kubenswrapper[5003]: timeout: failed to connect service ":50051" within 1s Jan 04 13:29:13 crc kubenswrapper[5003]: > Jan 04 13:29:23 crc kubenswrapper[5003]: I0104 13:29:22.999371 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:23 crc kubenswrapper[5003]: I0104 13:29:23.057119 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:23 crc kubenswrapper[5003]: I0104 13:29:23.255809 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kjh8t"] Jan 04 13:29:24 crc kubenswrapper[5003]: I0104 13:29:24.646873 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kjh8t" podUID="28adb461-60bc-4f52-8592-a90935525a49" containerName="registry-server" containerID="cri-o://e2338a45842ca12d54b08231908b465e03fd51ad0d953f8fa7f0fcc08d777561" gracePeriod=2 Jan 04 13:29:25 crc kubenswrapper[5003]: I0104 13:29:25.661748 5003 generic.go:334] "Generic (PLEG): container finished" podID="28adb461-60bc-4f52-8592-a90935525a49" containerID="e2338a45842ca12d54b08231908b465e03fd51ad0d953f8fa7f0fcc08d777561" exitCode=0 Jan 04 13:29:25 crc kubenswrapper[5003]: I0104 13:29:25.661836 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kjh8t" event={"ID":"28adb461-60bc-4f52-8592-a90935525a49","Type":"ContainerDied","Data":"e2338a45842ca12d54b08231908b465e03fd51ad0d953f8fa7f0fcc08d777561"} Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.331450 5003 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.453551 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-utilities\") pod \"28adb461-60bc-4f52-8592-a90935525a49\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.453718 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zfrm\" (UniqueName: \"kubernetes.io/projected/28adb461-60bc-4f52-8592-a90935525a49-kube-api-access-5zfrm\") pod \"28adb461-60bc-4f52-8592-a90935525a49\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.453750 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-catalog-content\") pod \"28adb461-60bc-4f52-8592-a90935525a49\" (UID: \"28adb461-60bc-4f52-8592-a90935525a49\") " Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.455400 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-utilities" (OuterVolumeSpecName: "utilities") pod "28adb461-60bc-4f52-8592-a90935525a49" (UID: "28adb461-60bc-4f52-8592-a90935525a49"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.461786 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28adb461-60bc-4f52-8592-a90935525a49-kube-api-access-5zfrm" (OuterVolumeSpecName: "kube-api-access-5zfrm") pod "28adb461-60bc-4f52-8592-a90935525a49" (UID: "28adb461-60bc-4f52-8592-a90935525a49"). InnerVolumeSpecName "kube-api-access-5zfrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.556779 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-utilities\") on node \"crc\" DevicePath \"\"" Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.556839 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zfrm\" (UniqueName: \"kubernetes.io/projected/28adb461-60bc-4f52-8592-a90935525a49-kube-api-access-5zfrm\") on node \"crc\" DevicePath \"\"" Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.591262 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28adb461-60bc-4f52-8592-a90935525a49" (UID: "28adb461-60bc-4f52-8592-a90935525a49"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.659484 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28adb461-60bc-4f52-8592-a90935525a49-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.675742 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kjh8t" event={"ID":"28adb461-60bc-4f52-8592-a90935525a49","Type":"ContainerDied","Data":"008084cc2f2c1a8328855f5deba62421e2bec7b9def2fd168233a01a18331a14"} Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.675844 5003 scope.go:117] "RemoveContainer" containerID="e2338a45842ca12d54b08231908b465e03fd51ad0d953f8fa7f0fcc08d777561" Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.675870 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kjh8t" Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.702695 5003 scope.go:117] "RemoveContainer" containerID="fc78abe4c5c4af2eb882795fd3f0d2bfbdd84403f5b9826a26a7f5f1f5896c04" Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.718100 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kjh8t"] Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.725420 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kjh8t"] Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.758080 5003 scope.go:117] "RemoveContainer" containerID="dafeb0fb81dae760ec074d2462361e93af280f1bbb0e241983314da3d1724a7c" Jan 04 13:29:26 crc kubenswrapper[5003]: I0104 13:29:26.825782 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28adb461-60bc-4f52-8592-a90935525a49" path="/var/lib/kubelet/pods/28adb461-60bc-4f52-8592-a90935525a49/volumes" Jan 04 13:29:31 crc kubenswrapper[5003]: I0104 13:29:31.064203 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-80e0-account-create-update-xqp87"] Jan 04 13:29:31 crc kubenswrapper[5003]: I0104 13:29:31.080066 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-pt9b2"] Jan 04 13:29:31 crc kubenswrapper[5003]: I0104 13:29:31.093803 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-80e0-account-create-update-xqp87"] Jan 04 13:29:31 crc kubenswrapper[5003]: I0104 13:29:31.101975 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-pt9b2"] Jan 04 13:29:32 crc kubenswrapper[5003]: I0104 13:29:32.818647 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26b75c4c-5b23-426d-9880-d2d9a8daebd3" path="/var/lib/kubelet/pods/26b75c4c-5b23-426d-9880-d2d9a8daebd3/volumes" Jan 04 13:29:32 crc kubenswrapper[5003]: I0104 13:29:32.819306 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9610d2b5-f6c7-4144-a02c-94b8b418f540" path="/var/lib/kubelet/pods/9610d2b5-f6c7-4144-a02c-94b8b418f540/volumes" Jan 04 13:29:37 crc kubenswrapper[5003]: I0104 13:29:37.045334 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-622cf"] Jan 04 13:29:37 crc kubenswrapper[5003]: I0104 13:29:37.059115 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-622cf"] Jan 04 13:29:38 crc kubenswrapper[5003]: I0104 13:29:38.819383 5003 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="05dffc83-fce8-4e1c-8a0a-9120efe4b704" path="/var/lib/kubelet/pods/05dffc83-fce8-4e1c-8a0a-9120efe4b704/volumes" Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.558434 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9ww5m"] Jan 04 13:29:39 crc kubenswrapper[5003]: E0104 13:29:39.559310 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28adb461-60bc-4f52-8592-a90935525a49" containerName="registry-server" Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.559329 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="28adb461-60bc-4f52-8592-a90935525a49" containerName="registry-server" Jan 04 13:29:39 crc kubenswrapper[5003]: E0104 13:29:39.559348 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28adb461-60bc-4f52-8592-a90935525a49" containerName="extract-utilities" Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.559357 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="28adb461-60bc-4f52-8592-a90935525a49" containerName="extract-utilities" Jan 04 13:29:39 crc kubenswrapper[5003]: E0104 13:29:39.559378 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28adb461-60bc-4f52-8592-a90935525a49" containerName="extract-content" Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.559387 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="28adb461-60bc-4f52-8592-a90935525a49" containerName="extract-content" Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.559598 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="28adb461-60bc-4f52-8592-a90935525a49" containerName="registry-server" Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.561782 5003 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.584408 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9ww5m"]
Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.656084 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-utilities\") pod \"certified-operators-9ww5m\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") " pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.656315 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-catalog-content\") pod \"certified-operators-9ww5m\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") " pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.656463 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tsmd\" (UniqueName: \"kubernetes.io/projected/2d3b5016-2b38-47a5-80b9-2486f9a44120-kube-api-access-7tsmd\") pod \"certified-operators-9ww5m\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") " pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.758536 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-utilities\") pod \"certified-operators-9ww5m\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") " pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.758603 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-catalog-content\") pod \"certified-operators-9ww5m\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") " pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.758693 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tsmd\" (UniqueName: \"kubernetes.io/projected/2d3b5016-2b38-47a5-80b9-2486f9a44120-kube-api-access-7tsmd\") pod \"certified-operators-9ww5m\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") " pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.759292 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-utilities\") pod \"certified-operators-9ww5m\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") " pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.759367 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-catalog-content\") pod \"certified-operators-9ww5m\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") " pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.795103 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tsmd\" (UniqueName: \"kubernetes.io/projected/2d3b5016-2b38-47a5-80b9-2486f9a44120-kube-api-access-7tsmd\") pod \"certified-operators-9ww5m\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") " pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:39 crc kubenswrapper[5003]: I0104 13:29:39.889215 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:40 crc kubenswrapper[5003]: I0104 13:29:40.361261 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9ww5m"]
Jan 04 13:29:40 crc kubenswrapper[5003]: I0104 13:29:40.828732 5003 generic.go:334] "Generic (PLEG): container finished" podID="2d3b5016-2b38-47a5-80b9-2486f9a44120" containerID="f40b9e450957528936d994cfbde18444b9d4638795a130c1fb8c8d51a38d7653" exitCode=0
Jan 04 13:29:40 crc kubenswrapper[5003]: I0104 13:29:40.828772 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9ww5m" event={"ID":"2d3b5016-2b38-47a5-80b9-2486f9a44120","Type":"ContainerDied","Data":"f40b9e450957528936d994cfbde18444b9d4638795a130c1fb8c8d51a38d7653"}
Jan 04 13:29:40 crc kubenswrapper[5003]: I0104 13:29:40.828790 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9ww5m" event={"ID":"2d3b5016-2b38-47a5-80b9-2486f9a44120","Type":"ContainerStarted","Data":"06126531730b916f5b06e330b348f2f0f4b86a193a53325787c324cf64d4424c"}
Jan 04 13:29:42 crc kubenswrapper[5003]: I0104 13:29:42.852538 5003 generic.go:334] "Generic (PLEG): container finished" podID="2d3b5016-2b38-47a5-80b9-2486f9a44120" containerID="88b5734fa87fc7313dde1f4b2b51dcab940d4dc08432138acad5edfba0e8dfe3" exitCode=0
Jan 04 13:29:42 crc kubenswrapper[5003]: I0104 13:29:42.852603 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9ww5m" event={"ID":"2d3b5016-2b38-47a5-80b9-2486f9a44120","Type":"ContainerDied","Data":"88b5734fa87fc7313dde1f4b2b51dcab940d4dc08432138acad5edfba0e8dfe3"}
Jan 04 13:29:43 crc kubenswrapper[5003]: I0104 13:29:43.866472 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9ww5m" event={"ID":"2d3b5016-2b38-47a5-80b9-2486f9a44120","Type":"ContainerStarted","Data":"9aedc13f4479bc02c51f471f66f6b5c0a52e685c7d002f08aa2f9924712866df"}
Jan 04 13:29:43 crc kubenswrapper[5003]: I0104 13:29:43.891963 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9ww5m" podStartSLOduration=2.382921808 podStartE2EDuration="4.89193613s" podCreationTimestamp="2026-01-04 13:29:39 +0000 UTC" firstStartedPulling="2026-01-04 13:29:40.831208732 +0000 UTC m=+6096.304238573" lastFinishedPulling="2026-01-04 13:29:43.340223034 +0000 UTC m=+6098.813252895" observedRunningTime="2026-01-04 13:29:43.885465909 +0000 UTC m=+6099.358495750" watchObservedRunningTime="2026-01-04 13:29:43.89193613 +0000 UTC m=+6099.364965971"
Jan 04 13:29:49 crc kubenswrapper[5003]: I0104 13:29:49.889779 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:49 crc kubenswrapper[5003]: I0104 13:29:49.890591 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:49 crc kubenswrapper[5003]: I0104 13:29:49.970819 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:50 crc kubenswrapper[5003]: I0104 13:29:50.056611 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:50 crc kubenswrapper[5003]: I0104 13:29:50.228936 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9ww5m"]
Jan 04 13:29:51 crc kubenswrapper[5003]: I0104 13:29:51.947600 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9ww5m" podUID="2d3b5016-2b38-47a5-80b9-2486f9a44120" containerName="registry-server" containerID="cri-o://9aedc13f4479bc02c51f471f66f6b5c0a52e685c7d002f08aa2f9924712866df" gracePeriod=2
Jan 04 13:29:52 crc kubenswrapper[5003]: I0104 13:29:52.957590 5003 generic.go:334] "Generic (PLEG): container finished" podID="2d3b5016-2b38-47a5-80b9-2486f9a44120" containerID="9aedc13f4479bc02c51f471f66f6b5c0a52e685c7d002f08aa2f9924712866df" exitCode=0
Jan 04 13:29:52 crc kubenswrapper[5003]: I0104 13:29:52.957804 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9ww5m" event={"ID":"2d3b5016-2b38-47a5-80b9-2486f9a44120","Type":"ContainerDied","Data":"9aedc13f4479bc02c51f471f66f6b5c0a52e685c7d002f08aa2f9924712866df"}
Jan 04 13:29:52 crc kubenswrapper[5003]: I0104 13:29:52.958111 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9ww5m" event={"ID":"2d3b5016-2b38-47a5-80b9-2486f9a44120","Type":"ContainerDied","Data":"06126531730b916f5b06e330b348f2f0f4b86a193a53325787c324cf64d4424c"}
Jan 04 13:29:52 crc kubenswrapper[5003]: I0104 13:29:52.958132 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06126531730b916f5b06e330b348f2f0f4b86a193a53325787c324cf64d4424c"
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.048450 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-n76kt"]
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.052252 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.054908 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-n76kt"]
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.124641 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-utilities\") pod \"2d3b5016-2b38-47a5-80b9-2486f9a44120\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") "
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.124820 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tsmd\" (UniqueName: \"kubernetes.io/projected/2d3b5016-2b38-47a5-80b9-2486f9a44120-kube-api-access-7tsmd\") pod \"2d3b5016-2b38-47a5-80b9-2486f9a44120\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") "
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.124929 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-catalog-content\") pod \"2d3b5016-2b38-47a5-80b9-2486f9a44120\" (UID: \"2d3b5016-2b38-47a5-80b9-2486f9a44120\") "
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.125843 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-utilities" (OuterVolumeSpecName: "utilities") pod "2d3b5016-2b38-47a5-80b9-2486f9a44120" (UID: "2d3b5016-2b38-47a5-80b9-2486f9a44120"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.134348 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d3b5016-2b38-47a5-80b9-2486f9a44120-kube-api-access-7tsmd" (OuterVolumeSpecName: "kube-api-access-7tsmd") pod "2d3b5016-2b38-47a5-80b9-2486f9a44120" (UID: "2d3b5016-2b38-47a5-80b9-2486f9a44120"). InnerVolumeSpecName "kube-api-access-7tsmd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.202497 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2d3b5016-2b38-47a5-80b9-2486f9a44120" (UID: "2d3b5016-2b38-47a5-80b9-2486f9a44120"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.227291 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.227328 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d3b5016-2b38-47a5-80b9-2486f9a44120-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.227341 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tsmd\" (UniqueName: \"kubernetes.io/projected/2d3b5016-2b38-47a5-80b9-2486f9a44120-kube-api-access-7tsmd\") on node \"crc\" DevicePath \"\""
Jan 04 13:29:53 crc kubenswrapper[5003]: I0104 13:29:53.970416 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9ww5m"
Jan 04 13:29:54 crc kubenswrapper[5003]: I0104 13:29:54.036128 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9ww5m"]
Jan 04 13:29:54 crc kubenswrapper[5003]: I0104 13:29:54.050794 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9ww5m"]
Jan 04 13:29:54 crc kubenswrapper[5003]: I0104 13:29:54.820152 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d3b5016-2b38-47a5-80b9-2486f9a44120" path="/var/lib/kubelet/pods/2d3b5016-2b38-47a5-80b9-2486f9a44120/volumes"
Jan 04 13:29:54 crc kubenswrapper[5003]: I0104 13:29:54.822272 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="940c2752-d359-4e0f-bcca-cfb3689750d4" path="/var/lib/kubelet/pods/940c2752-d359-4e0f-bcca-cfb3689750d4/volumes"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.168361 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"]
Jan 04 13:30:00 crc kubenswrapper[5003]: E0104 13:30:00.170077 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d3b5016-2b38-47a5-80b9-2486f9a44120" containerName="extract-utilities"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.170115 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d3b5016-2b38-47a5-80b9-2486f9a44120" containerName="extract-utilities"
Jan 04 13:30:00 crc kubenswrapper[5003]: E0104 13:30:00.170131 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d3b5016-2b38-47a5-80b9-2486f9a44120" containerName="extract-content"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.170143 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d3b5016-2b38-47a5-80b9-2486f9a44120" containerName="extract-content"
Jan 04 13:30:00 crc kubenswrapper[5003]: E0104 13:30:00.170169 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d3b5016-2b38-47a5-80b9-2486f9a44120" containerName="registry-server"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.170182 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d3b5016-2b38-47a5-80b9-2486f9a44120" containerName="registry-server"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.170479 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d3b5016-2b38-47a5-80b9-2486f9a44120" containerName="registry-server"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.171706 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.174952 5003 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.177487 5003 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.184276 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"]
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.287710 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b72e452b-4036-403e-88e5-2da4c99fdeab-config-volume\") pod \"collect-profiles-29458890-csqt7\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.288101 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc8rg\" (UniqueName: \"kubernetes.io/projected/b72e452b-4036-403e-88e5-2da4c99fdeab-kube-api-access-vc8rg\") pod \"collect-profiles-29458890-csqt7\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.288164 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b72e452b-4036-403e-88e5-2da4c99fdeab-secret-volume\") pod \"collect-profiles-29458890-csqt7\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.390364 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b72e452b-4036-403e-88e5-2da4c99fdeab-config-volume\") pod \"collect-profiles-29458890-csqt7\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.390564 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc8rg\" (UniqueName: \"kubernetes.io/projected/b72e452b-4036-403e-88e5-2da4c99fdeab-kube-api-access-vc8rg\") pod \"collect-profiles-29458890-csqt7\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.390598 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b72e452b-4036-403e-88e5-2da4c99fdeab-secret-volume\") pod \"collect-profiles-29458890-csqt7\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.394236 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b72e452b-4036-403e-88e5-2da4c99fdeab-config-volume\") pod \"collect-profiles-29458890-csqt7\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.401401 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b72e452b-4036-403e-88e5-2da4c99fdeab-secret-volume\") pod \"collect-profiles-29458890-csqt7\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.413958 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc8rg\" (UniqueName: \"kubernetes.io/projected/b72e452b-4036-403e-88e5-2da4c99fdeab-kube-api-access-vc8rg\") pod \"collect-profiles-29458890-csqt7\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.504296 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:00 crc kubenswrapper[5003]: I0104 13:30:00.805162 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"]
Jan 04 13:30:01 crc kubenswrapper[5003]: I0104 13:30:01.059304 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7" event={"ID":"b72e452b-4036-403e-88e5-2da4c99fdeab","Type":"ContainerStarted","Data":"b7c89ed190abbe843393b07fe926c1ce25eddf661340babd7aa4d8e7454de710"}
Jan 04 13:30:01 crc kubenswrapper[5003]: I0104 13:30:01.059906 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7" event={"ID":"b72e452b-4036-403e-88e5-2da4c99fdeab","Type":"ContainerStarted","Data":"e2a887aaa3456e530eecdabe2efa7ef9889efd8083a5386432a6899fb15b0ed3"}
Jan 04 13:30:01 crc kubenswrapper[5003]: I0104 13:30:01.083382 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7" podStartSLOduration=1.0833512 podStartE2EDuration="1.0833512s" podCreationTimestamp="2026-01-04 13:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-04 13:30:01.082027745 +0000 UTC m=+6116.555057586" watchObservedRunningTime="2026-01-04 13:30:01.0833512 +0000 UTC m=+6116.556381081"
Jan 04 13:30:02 crc kubenswrapper[5003]: I0104 13:30:02.073399 5003 generic.go:334] "Generic (PLEG): container finished" podID="b72e452b-4036-403e-88e5-2da4c99fdeab" containerID="b7c89ed190abbe843393b07fe926c1ce25eddf661340babd7aa4d8e7454de710" exitCode=0
Jan 04 13:30:02 crc kubenswrapper[5003]: I0104 13:30:02.073494 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7" event={"ID":"b72e452b-4036-403e-88e5-2da4c99fdeab","Type":"ContainerDied","Data":"b7c89ed190abbe843393b07fe926c1ce25eddf661340babd7aa4d8e7454de710"}
Jan 04 13:30:03 crc kubenswrapper[5003]: I0104 13:30:03.448225 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:03 crc kubenswrapper[5003]: I0104 13:30:03.555645 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b72e452b-4036-403e-88e5-2da4c99fdeab-secret-volume\") pod \"b72e452b-4036-403e-88e5-2da4c99fdeab\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") "
Jan 04 13:30:03 crc kubenswrapper[5003]: I0104 13:30:03.556432 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b72e452b-4036-403e-88e5-2da4c99fdeab-config-volume\") pod \"b72e452b-4036-403e-88e5-2da4c99fdeab\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") "
Jan 04 13:30:03 crc kubenswrapper[5003]: I0104 13:30:03.556490 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vc8rg\" (UniqueName: \"kubernetes.io/projected/b72e452b-4036-403e-88e5-2da4c99fdeab-kube-api-access-vc8rg\") pod \"b72e452b-4036-403e-88e5-2da4c99fdeab\" (UID: \"b72e452b-4036-403e-88e5-2da4c99fdeab\") "
Jan 04 13:30:03 crc kubenswrapper[5003]: I0104 13:30:03.557490 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b72e452b-4036-403e-88e5-2da4c99fdeab-config-volume" (OuterVolumeSpecName: "config-volume") pod "b72e452b-4036-403e-88e5-2da4c99fdeab" (UID: "b72e452b-4036-403e-88e5-2da4c99fdeab"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 04 13:30:03 crc kubenswrapper[5003]: I0104 13:30:03.562839 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b72e452b-4036-403e-88e5-2da4c99fdeab-kube-api-access-vc8rg" (OuterVolumeSpecName: "kube-api-access-vc8rg") pod "b72e452b-4036-403e-88e5-2da4c99fdeab" (UID: "b72e452b-4036-403e-88e5-2da4c99fdeab"). InnerVolumeSpecName "kube-api-access-vc8rg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:30:03 crc kubenswrapper[5003]: I0104 13:30:03.562834 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b72e452b-4036-403e-88e5-2da4c99fdeab-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b72e452b-4036-403e-88e5-2da4c99fdeab" (UID: "b72e452b-4036-403e-88e5-2da4c99fdeab"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 04 13:30:03 crc kubenswrapper[5003]: I0104 13:30:03.659541 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vc8rg\" (UniqueName: \"kubernetes.io/projected/b72e452b-4036-403e-88e5-2da4c99fdeab-kube-api-access-vc8rg\") on node \"crc\" DevicePath \"\""
Jan 04 13:30:03 crc kubenswrapper[5003]: I0104 13:30:03.659600 5003 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b72e452b-4036-403e-88e5-2da4c99fdeab-secret-volume\") on node \"crc\" DevicePath \"\""
Jan 04 13:30:03 crc kubenswrapper[5003]: I0104 13:30:03.659619 5003 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b72e452b-4036-403e-88e5-2da4c99fdeab-config-volume\") on node \"crc\" DevicePath \"\""
Jan 04 13:30:04 crc kubenswrapper[5003]: I0104 13:30:04.094273 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7" event={"ID":"b72e452b-4036-403e-88e5-2da4c99fdeab","Type":"ContainerDied","Data":"e2a887aaa3456e530eecdabe2efa7ef9889efd8083a5386432a6899fb15b0ed3"}
Jan 04 13:30:04 crc kubenswrapper[5003]: I0104 13:30:04.094321 5003 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2a887aaa3456e530eecdabe2efa7ef9889efd8083a5386432a6899fb15b0ed3"
Jan 04 13:30:04 crc kubenswrapper[5003]: I0104 13:30:04.094362 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29458890-csqt7"
Jan 04 13:30:04 crc kubenswrapper[5003]: I0104 13:30:04.552860 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"]
Jan 04 13:30:04 crc kubenswrapper[5003]: I0104 13:30:04.566862 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29458845-8mwmv"]
Jan 04 13:30:04 crc kubenswrapper[5003]: I0104 13:30:04.827082 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bad35f44-6393-477a-bd8d-89eee9d7b405" path="/var/lib/kubelet/pods/bad35f44-6393-477a-bd8d-89eee9d7b405/volumes"
Jan 04 13:30:09 crc kubenswrapper[5003]: I0104 13:30:09.418215 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 13:30:09 crc kubenswrapper[5003]: I0104 13:30:09.419163 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 04 13:30:15 crc kubenswrapper[5003]: I0104 13:30:15.852976 5003 scope.go:117] "RemoveContainer" containerID="853fffb292aabb26d25e0a179ca851239d0eecd35c7bcaafec51e3df740a6a65"
Jan 04 13:30:15 crc kubenswrapper[5003]: I0104 13:30:15.879972 5003 scope.go:117] "RemoveContainer" containerID="4948f7f30f847eb6e7092bbc4ec4e14e6c849f55cec819f0ffc81fc964bace2e"
Jan 04 13:30:15 crc kubenswrapper[5003]: I0104 13:30:15.931773 5003 scope.go:117] "RemoveContainer" containerID="7436c743fbfb0afd11294e951a578985f3be67ebcdd4e2e8d2f5aa311ca1735f"
Jan 04 13:30:15 crc kubenswrapper[5003]: I0104 13:30:15.989331 5003 scope.go:117] "RemoveContainer" containerID="6c15ebb154fe37b5d414d9c2d3d6739aa5eb0728597ef302d4a5e3e1c0fa7ed5"
Jan 04 13:30:16 crc kubenswrapper[5003]: I0104 13:30:16.042580 5003 scope.go:117] "RemoveContainer" containerID="73ef38f7b451e0361a06d4b95a7921df33b1aa0769f0c41f275ef2328e599f69"
Jan 04 13:30:16 crc kubenswrapper[5003]: I0104 13:30:16.070776 5003 scope.go:117] "RemoveContainer" containerID="edf633f3a0fdcb51d4fba5994408f07c9258fe0a35404de1aa1a701716f125fe"
Jan 04 13:30:18 crc kubenswrapper[5003]: I0104 13:30:18.922622 5003 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qqcxd"]
Jan 04 13:30:18 crc kubenswrapper[5003]: E0104 13:30:18.924058 5003 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b72e452b-4036-403e-88e5-2da4c99fdeab" containerName="collect-profiles"
Jan 04 13:30:18 crc kubenswrapper[5003]: I0104 13:30:18.924093 5003 state_mem.go:107] "Deleted CPUSet assignment" podUID="b72e452b-4036-403e-88e5-2da4c99fdeab" containerName="collect-profiles"
Jan 04 13:30:18 crc kubenswrapper[5003]: I0104 13:30:18.924500 5003 memory_manager.go:354] "RemoveStaleState removing state" podUID="b72e452b-4036-403e-88e5-2da4c99fdeab" containerName="collect-profiles"
Jan 04 13:30:18 crc kubenswrapper[5003]: I0104 13:30:18.927808 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:18 crc kubenswrapper[5003]: I0104 13:30:18.938475 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqcxd"]
Jan 04 13:30:18 crc kubenswrapper[5003]: I0104 13:30:18.961876 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-catalog-content\") pod \"redhat-marketplace-qqcxd\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") " pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:18 crc kubenswrapper[5003]: I0104 13:30:18.962500 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-utilities\") pod \"redhat-marketplace-qqcxd\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") " pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:18 crc kubenswrapper[5003]: I0104 13:30:18.962595 5003 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrgft\" (UniqueName: \"kubernetes.io/projected/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-kube-api-access-rrgft\") pod \"redhat-marketplace-qqcxd\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") " pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:19 crc kubenswrapper[5003]: I0104 13:30:19.065735 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-utilities\") pod \"redhat-marketplace-qqcxd\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") " pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:19 crc kubenswrapper[5003]: I0104 13:30:19.065883 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrgft\" (UniqueName: \"kubernetes.io/projected/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-kube-api-access-rrgft\") pod \"redhat-marketplace-qqcxd\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") " pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:19 crc kubenswrapper[5003]: I0104 13:30:19.066072 5003 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-catalog-content\") pod \"redhat-marketplace-qqcxd\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") " pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:19 crc kubenswrapper[5003]: I0104 13:30:19.067086 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-utilities\") pod \"redhat-marketplace-qqcxd\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") " pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:19 crc kubenswrapper[5003]: I0104 13:30:19.067166 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-catalog-content\") pod \"redhat-marketplace-qqcxd\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") " pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:19 crc kubenswrapper[5003]: I0104 13:30:19.100626 5003 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrgft\" (UniqueName: \"kubernetes.io/projected/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-kube-api-access-rrgft\") pod \"redhat-marketplace-qqcxd\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") " pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:19 crc kubenswrapper[5003]: I0104 13:30:19.276660 5003 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:19 crc kubenswrapper[5003]: I0104 13:30:19.810299 5003 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqcxd"]
Jan 04 13:30:20 crc kubenswrapper[5003]: I0104 13:30:20.293283 5003 generic.go:334] "Generic (PLEG): container finished" podID="ecc1ca09-d8fb-40ec-8de4-f081a08907f3" containerID="fd76107214d6d9622ccb4d67006235976336bd1842a21074d2b201c47d897084" exitCode=0
Jan 04 13:30:20 crc kubenswrapper[5003]: I0104 13:30:20.293412 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqcxd" event={"ID":"ecc1ca09-d8fb-40ec-8de4-f081a08907f3","Type":"ContainerDied","Data":"fd76107214d6d9622ccb4d67006235976336bd1842a21074d2b201c47d897084"}
Jan 04 13:30:20 crc kubenswrapper[5003]: I0104 13:30:20.293830 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqcxd" event={"ID":"ecc1ca09-d8fb-40ec-8de4-f081a08907f3","Type":"ContainerStarted","Data":"36d8ec2cb21d0f51e423087a8059902eb0a70abfa79da7a35505c4ebdbb74ca7"}
Jan 04 13:30:21 crc kubenswrapper[5003]: I0104 13:30:21.325314 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqcxd" event={"ID":"ecc1ca09-d8fb-40ec-8de4-f081a08907f3","Type":"ContainerStarted","Data":"4e83a4a498c35f516a77db034f19dd66c574ea32ee7eda45a8c8276d84bd5b81"}
Jan 04 13:30:22 crc kubenswrapper[5003]: I0104 13:30:22.339277 5003 generic.go:334] "Generic (PLEG): container finished" podID="ecc1ca09-d8fb-40ec-8de4-f081a08907f3" containerID="4e83a4a498c35f516a77db034f19dd66c574ea32ee7eda45a8c8276d84bd5b81" exitCode=0
Jan 04 13:30:22 crc kubenswrapper[5003]: I0104 13:30:22.339336 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqcxd" event={"ID":"ecc1ca09-d8fb-40ec-8de4-f081a08907f3","Type":"ContainerDied","Data":"4e83a4a498c35f516a77db034f19dd66c574ea32ee7eda45a8c8276d84bd5b81"}
Jan 04 13:30:23 crc kubenswrapper[5003]: I0104 13:30:23.353096 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqcxd" event={"ID":"ecc1ca09-d8fb-40ec-8de4-f081a08907f3","Type":"ContainerStarted","Data":"2853ae87f290abc4b2b34956611bee4c1a5529b1556748e5ed14a87fb143413d"}
Jan 04 13:30:23 crc kubenswrapper[5003]: I0104 13:30:23.387592 5003 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qqcxd" podStartSLOduration=2.886360594 podStartE2EDuration="5.38756579s" podCreationTimestamp="2026-01-04 13:30:18 +0000 UTC" firstStartedPulling="2026-01-04 13:30:20.298406239 +0000 UTC m=+6135.771436120" lastFinishedPulling="2026-01-04 13:30:22.799611475 +0000 UTC m=+6138.272641316" observedRunningTime="2026-01-04 13:30:23.379485786 +0000 UTC m=+6138.852515637" watchObservedRunningTime="2026-01-04 13:30:23.38756579 +0000 UTC m=+6138.860595631"
Jan 04 13:30:29 crc kubenswrapper[5003]: I0104 13:30:29.277649 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:29 crc kubenswrapper[5003]: I0104 13:30:29.278564 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:29 crc kubenswrapper[5003]: I0104 13:30:29.371064 5003 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:29 crc kubenswrapper[5003]: I0104 13:30:29.503601 5003 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:29 crc kubenswrapper[5003]: I0104 13:30:29.651110 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqcxd"]
Jan 04 13:30:31 crc kubenswrapper[5003]: I0104 13:30:31.441115 5003 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qqcxd" podUID="ecc1ca09-d8fb-40ec-8de4-f081a08907f3" containerName="registry-server" containerID="cri-o://2853ae87f290abc4b2b34956611bee4c1a5529b1556748e5ed14a87fb143413d" gracePeriod=2
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.459981 5003 generic.go:334] "Generic (PLEG): container finished" podID="ecc1ca09-d8fb-40ec-8de4-f081a08907f3" containerID="2853ae87f290abc4b2b34956611bee4c1a5529b1556748e5ed14a87fb143413d" exitCode=0
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.460096 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqcxd" event={"ID":"ecc1ca09-d8fb-40ec-8de4-f081a08907f3","Type":"ContainerDied","Data":"2853ae87f290abc4b2b34956611bee4c1a5529b1556748e5ed14a87fb143413d"}
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.625761 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.770514 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-utilities\") pod \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") "
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.770799 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-catalog-content\") pod \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") "
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.770825 5003 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrgft\" (UniqueName: \"kubernetes.io/projected/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-kube-api-access-rrgft\") pod \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\" (UID: \"ecc1ca09-d8fb-40ec-8de4-f081a08907f3\") "
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.772445 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-utilities" (OuterVolumeSpecName: "utilities") pod "ecc1ca09-d8fb-40ec-8de4-f081a08907f3" (UID: "ecc1ca09-d8fb-40ec-8de4-f081a08907f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.779889 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-kube-api-access-rrgft" (OuterVolumeSpecName: "kube-api-access-rrgft") pod "ecc1ca09-d8fb-40ec-8de4-f081a08907f3" (UID: "ecc1ca09-d8fb-40ec-8de4-f081a08907f3"). InnerVolumeSpecName "kube-api-access-rrgft". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.802247 5003 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ecc1ca09-d8fb-40ec-8de4-f081a08907f3" (UID: "ecc1ca09-d8fb-40ec-8de4-f081a08907f3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.872982 5003 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.873049 5003 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrgft\" (UniqueName: \"kubernetes.io/projected/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-kube-api-access-rrgft\") on node \"crc\" DevicePath \"\""
Jan 04 13:30:32 crc kubenswrapper[5003]: I0104 13:30:32.873067 5003 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ecc1ca09-d8fb-40ec-8de4-f081a08907f3-utilities\") on node \"crc\" DevicePath \"\""
Jan 04 13:30:33 crc kubenswrapper[5003]: I0104 13:30:33.474033 5003 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qqcxd" event={"ID":"ecc1ca09-d8fb-40ec-8de4-f081a08907f3","Type":"ContainerDied","Data":"36d8ec2cb21d0f51e423087a8059902eb0a70abfa79da7a35505c4ebdbb74ca7"}
Jan 04 13:30:33 crc kubenswrapper[5003]: I0104 13:30:33.474244 5003 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qqcxd"
Jan 04 13:30:33 crc kubenswrapper[5003]: I0104 13:30:33.474419 5003 scope.go:117] "RemoveContainer" containerID="2853ae87f290abc4b2b34956611bee4c1a5529b1556748e5ed14a87fb143413d"
Jan 04 13:30:33 crc kubenswrapper[5003]: I0104 13:30:33.508362 5003 scope.go:117] "RemoveContainer" containerID="4e83a4a498c35f516a77db034f19dd66c574ea32ee7eda45a8c8276d84bd5b81"
Jan 04 13:30:33 crc kubenswrapper[5003]: I0104 13:30:33.508525 5003 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqcxd"]
Jan 04 13:30:33 crc kubenswrapper[5003]: I0104 13:30:33.515536 5003 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qqcxd"]
Jan 04 13:30:33 crc kubenswrapper[5003]: I0104 13:30:33.533237 5003 scope.go:117] "RemoveContainer" containerID="fd76107214d6d9622ccb4d67006235976336bd1842a21074d2b201c47d897084"
Jan 04 13:30:34 crc kubenswrapper[5003]: I0104 13:30:34.823152 5003 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecc1ca09-d8fb-40ec-8de4-f081a08907f3" path="/var/lib/kubelet/pods/ecc1ca09-d8fb-40ec-8de4-f081a08907f3/volumes"
Jan 04 13:30:39 crc kubenswrapper[5003]: I0104 13:30:39.418370 5003 patch_prober.go:28] interesting pod/machine-config-daemon-rcgwp container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 04 13:30:39 crc kubenswrapper[5003]: I0104 13:30:39.419287 5003 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rcgwp" podUID="1d785a1a-7eaf-4192-915a-49f478c2a59a" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515126465640024456 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015126465641017374 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015126451231016505 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015126451231015455 5ustar corecore